Example #1
def postAddData(gdbPath, mdName, info):
    global randomCount

    mdName = info['md']
    obvalue = info['pre_AddRasters_record_count']
    fullPath = os.path.join(gdbPath, mdName)

    mosaicMDType = info['type'].lower()

    if (mosaicMDType == 'source'):
        lyrName = "Mosaiclayer" + str(randomCount)
        randomCount += 1
        expression = "OBJECTID >" + str(obvalue)

        try:
            arcpy.MakeMosaicLayer_management(fullPath, lyrName, expression)
        except:
            log.Message("Failed to make mosaic layer (%s)" % (lyrName),
                        log.const_critical_text)
            log.Message(arcpy.GetMessages(), log.const_critical_text)

            return False

        try:
            fieldName = 'dataset_id'
            fieldExist = arcpy.ListFields(fullPath, fieldName)
            if len(fieldExist) == 0:
                arcpy.AddField_management(fullPath, fieldName, "TEXT", "", "",
                                          "50")
            log.Message(
                "Calculating \'Dataset ID\' for the mosaic dataset (%s) with value (%s)"
                % (mdName, info['Dataset_ID']), log.const_general_text)
            arcpy.CalculateField_management(lyrName, fieldName,
                                            "\"" + info['Dataset_ID'] + "\"",
                                            "PYTHON")
        except:
            log.Message('Failed to calculate \'Dataset_ID\'',
                        log.const_critical_text)
            log.Message(arcpy.GetMessages(), log.const_critical_text)

            return False

        try:
            arcpy.Delete_management(lyrName)
        except:
            log.Message("Failed to delete the layer", log.const_critical_text)
            log.Message(arcpy.GetMessages(), log.const_critical_text)

            return False

    return True
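A minimal usage sketch for the function above (the paths, names, and info values are hypothetical; the surrounding module is assumed to define log and to initialize randomCount):

info = {
    'md': 'Imagery_MD',                     # mosaic dataset name inside the GDB
    'type': 'source',                       # only 'source' mosaics get the Dataset_ID stamp
    'pre_AddRasters_record_count': 120,     # OBJECTID count taken before the rasters were added
    'Dataset_ID': 'LOT_2024_001'
}
if not postAddData(r'C:\data\imagery.gdb', 'Imagery_MD', info):
    log.Message('post-add step failed', log.const_critical_text)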
Example #2
def ReloadMD():
    # Empty the mosaic dataset prior to reloading it
    arcpy.AddMessage("Removing previous forecast data from mosaic dataset...")
    arcpy.RemoveRastersFromMosaicDataset_management(inputMD, "1=1")
    # Add the rasters to the mosaic dataset
    arcpy.AddMessage("Adding new forecast data to mosaic dataset...")
    arcpy.AddRastersToMosaicDataset_management(inputMD, "Raster Dataset", forecastGDBPath)
    # Check something was imported
    result = int(arcpy.GetCount_management(inputMD).getOutput(0))
    if result > 0:
        # Re-calculate statistics on the mosaic dataset
        arcpy.AddMessage("Calculating statistics on the newly loaded mosaic dataset")
        arcpy.CalculateStatistics_management(inputMD)
        # Re-build overviews on the mosaic dataset
        #arcpy.AddMessage("Building overviews on the mosaic dataset")
        #arcpy.BuildOverviews_management(inputMD)
        # Calculate the time fields on the mosaic dataset
        arcpy.AddMessage("Calculating the time fields on the mosaic dataset")
        locale.setlocale(locale.LC_TIME, '')
        mdLayer = "mdLayer"
        arcpy.MakeMosaicLayer_management(inputMD, mdLayer, "Category = 1") # Leave out overviews - only calculate fields on primary rasters
        arcpy.CalculateField_management(mdLayer, dateForecastImportedField, """time.strftime("%c")""", "PYTHON","#")
        arcpy.CalculateField_management(mdLayer, dateForecastEffectiveFromField, """time.strftime("%c", time.strptime(!Name!,""" + "\"" + weatherName + """%Y%m%dT%H%M"))""", "PYTHON", "#")
        arcpy.CalculateField_management(mdLayer, dateForecastEffectiveToField, "!" + dateForecastEffectiveFromField + "!", "PYTHON", "#")
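ReloadMD relies on module-level names defined elsewhere in the script; a hypothetical setup (all values are placeholders) might look like:

import arcpy
import locale

inputMD = r'C:\forecast\forecast.gdb\ForecastMD'   # target mosaic dataset
forecastGDBPath = r'C:\forecast\incoming.gdb'      # workspace holding the new forecast rasters
weatherName = 'wrf_'                               # raster name prefix that precedes the timestamp
dateForecastImportedField = 'ImportedOn'
dateForecastEffectiveFromField = 'EffectiveFrom'
dateForecastEffectiveToField = 'EffectiveTo'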
Example #3
df.zoomToSelectedFeatures() # zoom to selected parcels that intersect merged buffer
arcpy.RefreshActiveView() # refresh dataframe view
df_pdf = outpath+'/'+df.name+".pdf" # create pdf file name in outpath with dataframe name (which should be year)
arcpy.AddMessage("Overwriting "+df_pdf+" if already existed")
silentremove(df_pdf) # delete intermediate pdf if it already exists
## if 'Include Aerials?' = true (checked), do this
if str(ischecked) == 'true':
    aerialDir = r"\\bcad90\doqq\Bexar_Aerial_Mosaics" # directory with Bexar County aerial mosaics
    aerialGDB = aerialDir+r"\Bexar_Aerial_Mosaics.gdb" # gdb inside aerial directory with aerial mosaics
    arcpy.env.workspace = aerialGDB # set env.workspace to aerial gdb
    in_mosaicdataset_name = r"\bexarMosaics_"+df.name # input mosaic dataset name as bexarMosaics_[year], following naming convention in gdb
    in_mosaicLayer = in_mosaicdataset_name+"_layer"
    silentremove(in_mosaicLayer) # remove input mosaic layer if it exists
    if arcpy.Exists(aerialGDB+in_mosaicdataset_name):
        par.transparency = 75 # if adding imagery, make the parcel layer 75% transparent
        arcpy.MakeMosaicLayer_management(aerialGDB+in_mosaicdataset_name, in_mosaicdataset_name+"_layer")
        layer = arcpy.mapping.Layer(in_mosaicdataset_name+"_layer") # make mosaic dataset layer a mapping layer
        arcpy.AddMessage("Adding " + df.name + " aerial imagery to " + mxdname)
        arcpy.mapping.AddLayer(df, layer, "BOTTOM") # add aerial imagery to bottom of dataframe
        layers = arcpy.mapping.ListLayers(df)
        for l in layers:
            if l.isGroupLayer and l.name == layer.name:
                glayers = arcpy.mapping.ListLayers(l)
                for gl in glayers:
                    if gl.name == "Footprint":
                        gl.transparency = 100 # hide the mosaic's Footprint sub-layer
                        break
        silentremove(in_mosaicLayer) # delete mosaic layer created
        del in_mosaicLayer # remove lock on mosaic layer
        del layer # remove lock on layer
    else:
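The excerpt above calls a silentremove helper that is not shown; a plausible definition, assuming it simply deletes a file and ignores the error when the file is absent:

import errno
import os

def silentremove(filename):
    try:
        os.remove(filename)               # delete the file if it exists
    except OSError as e:
        if e.errno != errno.ENOENT:       # re-raise anything other than "file not found"
            raise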
Example #4
def execute(request):
    """Mosaics input raster datasets into a new raster dataset or mosaic dataset.
    :param request: json as a dict.
    """
    status_writer = status.Writer()
    parameters = request['params']
    target_workspace = task_utils.get_parameter_value(parameters, 'target_workspace', 'value')
    output_name = task_utils.get_parameter_value(parameters, 'output_dataset_name', 'value')
    out_coordinate_system = task_utils.get_parameter_value(parameters, 'output_projection', 'code')
    # Advanced options
    output_raster_format = task_utils.get_parameter_value(parameters, 'raster_format', 'value')
    compression_method = task_utils.get_parameter_value(parameters, 'compression_method', 'value')
    compression_quality = task_utils.get_parameter_value(parameters, 'compression_quality', 'value')
    arcpy.env.compression = '{0} {1}'.format(compression_method, compression_quality)

    if output_raster_format in ('FileGDB', 'MosaicDataset'):
        if not os.path.splitext(target_workspace)[1] in ('.gdb', '.mdb', '.sde'):
            status_writer.send_state(status.STAT_FAILED, _('Target workspace must be a geodatabase'))
            return

    task_folder = request['folder']
    if not os.path.exists(task_folder):
        os.makedirs(task_folder)

    clip_area = None
    if not output_raster_format == 'MosaicDataset':
        # Get the clip region as an extent object.
        try:
            clip_area_wkt = task_utils.get_parameter_value(parameters, 'processing_extent', 'wkt')
            if not clip_area_wkt:
                clip_area_wkt = 'POLYGON ((-180 -90, -180 90, 180 90, 180 -90, -180 -90))'
            if not out_coordinate_system == '0':
                clip_area = task_utils.get_clip_region(clip_area_wkt, out_coordinate_system)
            else:
                clip_area = task_utils.get_clip_region(clip_area_wkt)
        except KeyError:
            pass

    status_writer.send_status(_('Setting the output workspace...'))
    if not os.path.exists(target_workspace):
        status_writer.send_state(status.STAT_FAILED, _('Target workspace does not exist'))
        return
    arcpy.env.workspace = target_workspace

    status_writer.send_status(_('Starting to process...'))
    num_results, response_index = task_utils.get_result_count(parameters)
    raster_items = None
    if num_results > task_utils.CHUNK_SIZE:
        # Query the index for results in groups of 25.
        query_index = task_utils.QueryIndex(parameters[response_index])
        fl = query_index.fl
        query = '{0}{1}{2}'.format(sys.argv[2].split('=')[1], '/select?&wt=json', fl)
        fq = query_index.get_fq()
        if fq:
            groups = task_utils.grouper(range(0, num_results), task_utils.CHUNK_SIZE, '')
            query += fq
        elif 'ids' in parameters[response_index]:
            groups = task_utils.grouper(list(parameters[response_index]['ids']), task_utils.CHUNK_SIZE, '')
        else:
            groups = task_utils.grouper(range(0, num_results), task_utils.CHUNK_SIZE, '')

        headers = {'x-access-token': task_utils.get_security_token(request['owner'])}
        for group in groups:
            if fq:
                results = requests.get(query + "&rows={0}&start={1}".format(task_utils.CHUNK_SIZE, group[0]), headers=headers)
            elif 'ids' in parameters[response_index]:
                results = requests.get(query + '{0}&ids={1}'.format(fl, ','.join(group)), headers=headers)
            else:
                results = requests.get(query + "&rows={0}&start={1}".format(task_utils.CHUNK_SIZE, group[0]), headers=headers)

            input_items = task_utils.get_input_items(results.json()['response']['docs'])
            if not input_items:
                input_items = task_utils.get_input_items(parameters[response_index]['response']['docs'])
            raster_items, pixels, bands, skipped = get_items(input_items)
    else:
        input_items = task_utils.get_input_items(parameters[response_index]['response']['docs'])
        raster_items, pixels, bands, skipped = get_items(input_items)

    if not raster_items:
        if skipped == 0:
            status_writer.send_state(status.STAT_FAILED, _('Invalid input types'))
            skipped_reasons['All Items'] = _('Invalid input types')
            task_utils.report(os.path.join(request['folder'], '__report.json'), len(raster_items), num_results, skipped_details=skipped_reasons)
            return
        else:
            status_writer.send_state(status.STAT_WARNING, _('{0} results could not be processed').format(skipped))
            task_utils.report(os.path.join(request['folder'], '__report.json'), len(raster_items), skipped, skipped_details=skipped_reasons)
            return

    # Get most common pixel type.
    pixel_type = pixel_types[max(set(pixels), key=pixels.count)]
    if output_raster_format in ('FileGDB', 'GRID', 'MosaicDataset'):
        output_name = arcpy.ValidateTableName(output_name, target_workspace)
    else:
        output_name = '{0}.{1}'.format(arcpy.ValidateTableName(output_name, target_workspace), output_raster_format.lower())

    if arcpy.Exists(os.path.join(target_workspace, output_name)):
        status_writer.send_state(status.STAT_FAILED, _('Output dataset already exists.'))
        return

    if output_raster_format == 'MosaicDataset':
        try:
            status_writer.send_status(_('Generating {0}. Large input {1} will take longer to process.'.format('Mosaic', 'rasters')))
            if out_coordinate_system == '0':
                out_coordinate_system = raster_items[0]
            else:
                out_coordinate_system = None
            mosaic_ds = arcpy.CreateMosaicDataset_management(target_workspace,
                                                             output_name,
                                                             out_coordinate_system,
                                                             max(bands),
                                                             pixel_type)
            arcpy.AddRastersToMosaicDataset_management(mosaic_ds, 'Raster Dataset', raster_items)
            arcpy.MakeMosaicLayer_management(mosaic_ds, 'mosaic_layer')
            layer_object = arcpy.mapping.Layer('mosaic_layer')
            task_utils.make_thumbnail(layer_object, os.path.join(request['folder'], '_thumb.png'))
        except arcpy.ExecuteError:
            skipped += 1
            skipped_reasons['All Items'] = arcpy.GetMessages(2)
    else:
        try:
            if len(bands) > 1:
                status_writer.send_state(status.STAT_FAILED, _('Input rasters must have the same number of bands'))
                return
            if out_coordinate_system == '0':
                out_coordinate_system = None
            status_writer.send_status(_('Generating {0}. Large input {1} will take longer to process.'.format('Mosaic', 'rasters')))
            if clip_area:
                ext = '{0} {1} {2} {3}'.format(clip_area.XMin, clip_area.YMin, clip_area.XMax, clip_area.YMax)
                tmp_mosaic = arcpy.MosaicToNewRaster_management(
                    raster_items,
                    target_workspace,
                    'tmpMosaic',
                    out_coordinate_system,
                    pixel_type,
                    number_of_bands=bands.keys()[0]
                )
                status_writer.send_status(_('Clipping...'))
                out_mosaic = arcpy.Clip_management(tmp_mosaic, ext, output_name)
                arcpy.Delete_management(tmp_mosaic)
            else:
                out_mosaic = arcpy.MosaicToNewRaster_management(raster_items,
                                                                target_workspace,
                                                                output_name,
                                                                out_coordinate_system,
                                                                pixel_type,
                                                                number_of_bands=bands.keys()[0],
                                                                mosaic_method='BLEND')
            arcpy.MakeRasterLayer_management(out_mosaic, 'mosaic_layer')
            layer_object = arcpy.mapping.Layer('mosaic_layer')
            task_utils.make_thumbnail(layer_object, os.path.join(request['folder'], '_thumb.png'))
        except arcpy.ExecuteError:
            skipped += 1
            skipped_reasons['All Items'] = arcpy.GetMessages(2)

    # Update state if necessary.
    if skipped > 0:
        status_writer.send_state(status.STAT_WARNING, _('{0} results could not be processed').format(skipped))
    task_utils.report(os.path.join(request['folder'], '__report.json'), len(raster_items), skipped, skipped_details=skipped_reasons)
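For orientation, a hypothetical skeleton of the request dict this task reads; the top-level keys are the ones accessed above, while the exact structure of each 'params' entry is determined by task_utils and is not reproduced here:

request = {
    'folder': r'C:\voyager\jobs\mosaic_task',   # scratch folder for the report and thumbnail
    'owner': 'admin',                           # used to fetch a security token for index queries
    'params': [
        # entries consumed via task_utils.get_parameter_value(...), e.g.
        # 'target_workspace', 'output_dataset_name', 'output_projection',
        # 'raster_format', 'compression_method', 'compression_quality',
        # 'processing_extent', plus the search-response entry.
    ]
}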
Example #5
def processJob(ProjectJob,
               project,
               createQARasters=False,
               createMissingRasters=True,
               overrideBorderPath=None):
    aaa = datetime.now()
    a = aaa
    lasd_boundary = None

    ProjectFolder = ProjectFolders.getProjectFolderFromDBRow(
        ProjectJob, project)
    ProjectID = ProjectJob.getProjectID(project)
    ProjectUID = ProjectJob.getUID(project)

    target_path = ProjectFolder.derived.path

    # Get the LAS QA Info to determine if it is classified or not
    las_qainfo = getLasQAInfo(ProjectFolder)
    if las_qainfo.num_las_files <= 0:
        arcpy.AddError(
            "Project with Job ID {} has no .las files in DELIVERED LAS_CLASSIFIED or LAS_UNCLASSIFIED folders, CANNOT CONTINUE."
            .format(ProjectFolder.projectId))
    else:
        ProjectFolders.createAnalysisFolders(target_path,
                                             las_qainfo.isClassified)

        # Make the STAT folder if it doesn't already exist

        stat_out_folder = ProjectFolder.derived.stats_path
        if not os.path.exists(stat_out_folder):
            os.makedirs(stat_out_folder)
            arcpy.AddMessage(
                "created Derived STAT folder '{}'".format(stat_out_folder))
        else:
            arcpy.AddMessage("STAT folder '{}'".format(stat_out_folder))

        # Make the scratch file GDB for the project
        if not os.path.exists(las_qainfo.filegdb_path):
            arcpy.CreateFileGDB_management(target_path,
                                           las_qainfo.filegdb_name)
            Utility.addToolMessages()
        else:
            arcpy.AddMessage(
                "Derived fGDB sand box already exists. Using '{}'".format(
                    las_qainfo.filegdb_path))

        las_qainfo.lasd_spatial_ref = checkSpatialOnLas(
            las_qainfo.las_directory, target_path, createQARasters,
            las_qainfo.isClassified)

        if las_qainfo.lasd_spatial_ref is None:
            arcpy.AddError(
                "ERROR:   Neither spatial reference in PRJ or LAS files are valid CANNOT CONTINUE."
            )
            arcpy.AddError(
                "ERROR:   Please create a projection file (.prj) in the LAS folder using the '3D Analyst Tools/Conversion/From File/Point File Information' tool."
            )

        elif not las_qainfo.isValidSpatialReference():
            las_qainfo.lasd_spatial_ref = None
            arcpy.AddError(
                "ERROR: Spatial Reference for the las files is not standard (see above)"
            )
            arcpy.AddError(
                "ERROR: Please create a projection file (.prj) in the LAS folder using the '3D Analyst Tools/Conversion/From File/Point File Information' tool."
            )
            try:
                arcpy.AddError("ERROR: '{}'".format(
                    Utility.getSpatialReferenceInfo(
                        las_qainfo.lasd_spatial_ref)))
            except:
                pass

        elif las_qainfo.isUnknownSpatialReference():
            las_qainfo.lasd_spatial_ref = None
            arcpy.AddError(
                "ERROR: Please provide a projection file (.prj) that provides a valid transformation in the LAS directory."
            )
            arcpy.AddError(
                "ERROR:   Please create a projection file (.prj) in the LAS folder using the '3D Analyst Tools/Conversion/From File/Point File Information' tool."
            )
            arcpy.AddError(
                "ERROR: Spatial Reference for the las files is not standard")
            try:
                arcpy.AddError("ERROR: '{}'".format(
                    Utility.getSpatialReferenceInfo(
                        las_qainfo.lasd_spatial_ref)))
            except:
                pass

        if las_qainfo.lasd_spatial_ref is None:
            raise Exception(
                "Error: Spatial Reference is invalid, unknown, or not specified."
            )
        else:
            #         prj_Count, prj_File = Utility.fileCounter(las_qainfo.las_directory, '.prj')
            #         if prj_Count > 0 and prj_File is not None and len(str(prj_File)) > 0:
            #             prj_spatial_ref = os.path.join(las_qainfo.las_directory, prj_File)
            #
            #         if prj_Count > 0:
            #             las_qainfo.setProjectionFile(prj_File)
            #             las_spatial_ref = os.path.join(las_qainfo.las_directory, prj_File)
            #             arcpy.AddMessage("Found a projection file with the las files, OVERRIDE LAS SR (if set) '{}'".format(las_spatial_ref))
            #             arcpy.AddMessage(Utility.getSpatialReferenceInfo(las_qainfo.getSpatialReference()))
            #         else:
            #             arcpy.AddMessage("Using projection (coordinate system) from las files if available.")

            fileList = getLasFileProcessList(las_qainfo.las_directory,
                                             target_path, createQARasters,
                                             las_qainfo.isClassified)
            createLasStatistics(fileList, target_path,
                                las_qainfo.lasd_spatial_ref,
                                las_qainfo.isClassified, createQARasters,
                                createMissingRasters, overrideBorderPath)

            # Create the project's las dataset. Don't do this before you validated that each .las file has a .lasx
            if os.path.exists(las_qainfo.las_dataset_path):
                arcpy.AddMessage("Using existing LAS Dataset {}".format(
                    las_qainfo.las_dataset_path))
                # arcpy.AddMessage("Deleting existing LAS Dataset {}".format(las_qainfo.las_dataset_path))
                # arcpy.Delete_management(las_qainfo.las_dataset_path)
            else:
                a = datetime.now()
                # note: don't use method in A04_B because we don't want to compute statistics this time
                arcpy.CreateLasDataset_management(
                    input=las_qainfo.las_directory,
                    out_las_dataset=las_qainfo.las_dataset_path,
                    folder_recursion="RECURSION",
                    in_surface_constraints="",
                    spatial_reference=las_qainfo.lasd_spatial_ref,
                    compute_stats="NO_COMPUTE_STATS",
                    relative_paths="RELATIVE_PATHS",
                    create_las_prj="FILES_MISSING_PROJECTION")
                Utility.addToolMessages()
                a = doTime(
                    a, "Created LAS Dataset '{}'".format(
                        las_qainfo.las_dataset_path))

            desc = arcpy.Describe(las_qainfo.las_dataset_path)

            # las_qainfo.lasd_spatial_ref = desc.SpatialReference
            las_qainfo.LASDatasetPointCount = desc.pointCount
            las_qainfo.LASDatasetFileCount = desc.fileCount
            arcpy.AddMessage(
                "LASDatasetPointCount {} and LASDatasetFileCount {}".format(
                    desc.pointCount, desc.fileCount))

            lasd_boundary, las_footprint = A04_C_ConsolidateLASInfo.createRasterBoundaryAndFootprints(
                las_qainfo.filegdb_path, target_path, ProjectID,
                ProjectFolder.path, ProjectUID)

            mxd = createMXD(las_qainfo, target_path, ProjectID)

            # if createQARasters:
            arcpy.AddMessage("Creating QA raster mosaics")
            mosaics = A04_C_ConsolidateLASInfo.createQARasterMosaics(
                las_qainfo.isClassified, las_qainfo.filegdb_path,
                las_qainfo.lasd_spatial_ref, target_path, mxd, las_footprint,
                lasd_boundary)
            if mxd is not None:
                a = datetime.now()
                try:
                    mxd_path = mxd.filePath
                    for [md_path, md_name] in mosaics:
                        arcpy.AddMessage(
                            "Adding QA raster mosaic {} to mxd {}".format(
                                md_path, mxd_path))
                        try:
                            if not arcpy.Exists(md_path):
                                a = doTime(
                                    a,
                                    "\tMD doesn't exist {}. Can't add to MXD {}. Is it open?"
                                    .format(md_path, mxd_path))
                            else:
                                df = mxd.activeDataFrame
                                if isLayerExist(mxd, df, md_name):
                                    a = doTime(
                                        a, "\t MD {} already exists in MXD {}".
                                        format(md_name, mxd_path))
                                else:
                                    if len(str(md_name)) > 0:
                                        try:
                                            lyr_md = arcpy.MakeMosaicLayer_management(
                                                in_mosaic_dataset=md_path,
                                                out_mosaic_layer=md_name
                                            ).getOutput(0)
                                            df = mxd.activeDataFrame
                                            arcpy.mapping.AddLayer(
                                                df, lyr_md, 'BOTTOM')
                                            # lyr_md.visible = False
                                            mxd.save()
                                            a = doTime(
                                                a,
                                                "\tAdded MD {} to MXD {} as {}"
                                                .format(
                                                    md_name, mxd_path, lyr_md))
                                        except:
                                            a = doTime(
                                                a,
                                                "\tfailed to add MD {} to MXD {}. Is it open?"
                                                .format(md_path, mxd_path))

                        except:
                            try:
                                a = doTime(
                                    a,
                                    "\tfailed to add MD to MXD {}. Is it open?"
                                    .format(mxd_path))
                            except:
                                pass

                    mxd.save()
                except:
                    try:
                        a = doTime(
                            a, "\tfailed to save MXD {}. Is it open?".format(
                                mxd_path))
                    except:
                        pass

    bbb = datetime.now()
    td = (bbb - aaa).total_seconds()
    arcpy.AddMessage("Completed {} in {}".format(las_qainfo.las_dataset_path,
                                                 td))

    return las_qainfo, lasd_boundary
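The timing calls above follow the pattern a = doTime(a, message); the helper itself is not part of this excerpt, but a hypothetical equivalent would report the elapsed time and return a fresh timestamp:

from datetime import datetime
import arcpy

def doTime(a, msg):
    now = datetime.now()
    arcpy.AddMessage("{} ({:.1f} sec)".format(msg, (now - a).total_seconds()))
    return now                              # caller stores this as the new reference time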
Example #6
def exportRaster(ds_name,where,fileName,download_path,sde_file):
    output_prefix=""
    output_folder=download_path+"/"+fileName
    output_width=512
    output_height=512
    source_mosaic_dataset=sde_file+"/"+ds_name
    md_fields = arcpy.ListFields(source_mosaic_dataset)
    md_fields_set = set([field.baseName.upper() for field in md_fields])

    guid_field = 'IMAGERYGUID'
    if not ('IMAGERYGUID' in md_fields_set):
        guid_field = 'OID@'

    fields = ['OID@', 'SHAPE@', guid_field]

    # Save and force overwriteOutput once, before the cursor loop, so the original
    # value can be restored at the end even if no rows match the where clause.
    env_overwriteOutput = arcpy.env.overwriteOutput
    arcpy.env.overwriteOutput = True

    with arcpy.da.SearchCursor(source_mosaic_dataset, fields, where) as cursor:
        for row in cursor:
            objectid = row[0]
            extent = row[1].extent
            guid = str(row[2])
            where_clause = '"OBJECTID" = %d' % int(objectid)
            process_md_layer = 'process_md_layer_' + guid

            if output_prefix == '#' or not output_prefix:
                output_preview_path = os.path.join(output_folder, guid + '.jpg')
            else:
                output_preview_path = os.path.join(output_folder, '%s_%s.jpg' % (output_prefix, guid))

            output_cell_size = max(extent.width / output_width, extent.height / output_height)

            XCenter = (extent.XMax + extent.XMin) / 2.0
            YCenter = (extent.YMax + extent.YMin) / 2.0

            XMin = XCenter - output_width / 2.0 * output_cell_size
            XMax = XMin + output_width * output_cell_size

            YMin = YCenter - output_height / 2.0 * output_cell_size
            YMax = YMin + output_height * output_cell_size

            extent = arcpy.Extent(XMin, YMin, XMax, YMax)

            arcpy.MakeMosaicLayer_management(source_mosaic_dataset,
                                             process_md_layer,
                                             where_clause,
                                             extent,
                                             "1;2;3",
                                             "LOCK_RASTER",
                                             "",
                                             "",
                                             objectid,
                                             "ASCENDING",
                                             "FIRST",
                                             output_cell_size)
            env = {}

            if 'compression' in arcpy.env.keys():
                env['compression'] = arcpy.env.compression

            if 'rasterStatistics' in arcpy.env.keys():
                env['rasterStatistics'] = arcpy.env.rasterStatistics

            if 'pyramid' in arcpy.env.keys():
                env['pyramid'] = arcpy.env.pyramid

            if 'tileSize' in arcpy.env.keys():
                env['tileSize'] = arcpy.env.tileSize

            if 'nodata' in arcpy.env.keys():
                env['nodata'] = arcpy.env.nodata

            if 'extent' in arcpy.env.keys():
                env['extent'] = arcpy.env.extent

            arcpy.env.compression = "'JPEG_YCbCr' 75"
            arcpy.env.tileSize = "128 128"
            arcpy.env.rasterStatistics = "NONE"
            arcpy.env.pyramid = "NONE"
            arcpy.env.nodata = 'NONE'  # 'MAXIMUM' MAP_DOWN
            arcpy.env.extent = extent

            arcpy.CopyRaster_management(process_md_layer,
                                        output_preview_path,
                                        '#',
                                        '255',
                                        '255',
                                        '#',
                                        '#',
                                        '8_BIT_UNSIGNED',
                                        'ScalePixelValue',
                                        '#')

            if 'compression' in env.keys():
                arcpy.env.compression = env['compression']

            if 'rasterStatistics' in env.keys():
                arcpy.env.rasterStatistics = env['rasterStatistics']

            if 'pyramid' in env.keys():
                arcpy.env.pyramid = env['pyramid']

            if 'tileSize' in env.keys():
                arcpy.env.tileSize = env['tileSize']

            if 'nodata' in env.keys():
                arcpy.env.nodata = env['nodata']

            if 'extent' in env.keys():
                arcpy.env.extent = env['extent']

        # restore env variable
        arcpy.env.overwriteOutput = env_overwriteOutput
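A hypothetical call (dataset, folder, and connection-file names are placeholders; the output folder is assumed to exist):

exportRaster('Imagery_MD',                 # mosaic dataset name inside the SDE workspace
             "OBJECTID < 100",             # where clause selecting which footprints to preview
             'previews',                   # subfolder name appended to the download path
             r'C:\temp\downloads',
             r'C:\connections\imagery.sde')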
Example #7
    def __init__(self, lname, mname):
        self.lname = lname
        arcpy.MakeMosaicLayer_management(mname, self.lname)
        self.flayer = FeatureLayer("{0}\Footprint".format(self.lname))