Exemple #1
0
def createReferenceddMosaicDataset(in_md_path, out_md_path, spatial_ref,
                                   raster_v_unit):
    """Create a referenced mosaic dataset at ``out_md_path`` pointing at
    ``in_md_path`` and swap its raster function chain for the canopy
    density chain.

    Note: ``raster_v_unit`` is accepted but not used in this routine.
    """
    start = datetime.datetime.now()

    # Reference the source MD (single signed 32-bit band) instead of
    # copying its rasters.
    arcpy.CreateReferencedMosaicDataset_management(
        in_dataset=in_md_path,
        out_mosaic_dataset=out_md_path,
        coordinate_system=spatial_ref,
        number_of_bands="1",
        pixel_type="32_BIT_SIGNED",
        where_clause="",
        in_template_dataset="",
        extent="",
        select_using_features="SELECT_USING_FEATURES",
        lod_field="",
        minPS_field="",
        maxPS_field="",
        pixelSize="",
        build_boundary="BUILD_BOUNDARY")

    # Replace the whole function chain on the new MD with the canopy
    # density chain definition.
    arcpy.EditRasterFunction_management(
        in_mosaic_dataset=out_md_path,
        edit_mosaic_dataset_item="EDIT_MOSAIC_DATASET",
        edit_options="REPLACE",
        function_chain_definition=Raster.Canopy_Density_function_chain_path,
        location_function_name="")
    Utility.addToolMessages()

    # arcpy.CalculateStatistics_management(in_raster_dataset=out_md_path, x_skip_factor="1", y_skip_factor="1", ignore_values="", skip_existing="OVERWRITE", area_of_interest="Feature Set")

    # Manual follow-up step required; surface it prominently in the tool log.
    arcpy.AddMessage(
        "\tNOTE: !!! Please edit the MR Point Density function. Change to replace input to 'Multiply LAST by 100' with this project's POINT_COUNT_LAST mosaic dataset.\n\n\t{}\n"
        .format(out_md_path))
    doTime(start, "Created DHM '{}'".format(out_md_path))
Exemple #2
0
def clipRastersToBoundary(start_dir, boundary_path):
    """Recursively walk ``start_dir`` and clip every ``.tif`` raster found
    to the supplied boundary feature class."""
    start = datetime.datetime.now()
    for dir_path, _, file_names in os.walk(start_dir):
        # Case-insensitive match on the .tif extension.
        tif_names = [n for n in file_names if n.upper().endswith(".TIF")]
        for tif_name in tif_names:
            clipDerivedRaster(os.path.join(dir_path, tif_name), boundary_path)

    doTime(start, "\tClip rasters {}".format(start_dir))
def processJob(ProjectJob, project, strUID):
    """Zip the entire project directory into <ProjectID>.zip and move the
    archive into the project's archive directory (A03 ProjectZipArchive).

    Args:
        ProjectJob: accessor object exposing getArchiveDir/getProjectDir/
            getProjectID for the project row.
        project: the project record to archive.
        strUID: project UID (accepted but not used in this function).
    """
    a = datetime.now()
    aa = a  # keep the overall start time; `a` is rebound per step below

    archive_dir = ProjectJob.getArchiveDir(project)
    basedir = ProjectJob.getProjectDir(project)
    archive_name = ProjectJob.getProjectID(project)
    Utility.printArguments(["ArchiveDir", "BaseDir", "ArchiveName"],
                           [archive_dir, basedir, archive_name],
                           "A03 ProjectZipArchive")

    # All three values are required; report and fall through without raising.
    if archive_dir is None or basedir is None or archive_name is None:
        arcpy.AddError(
            'Failed to retrieve project info: archive_dir={} base_dir={} archive_name={}'
            .format(archive_dir, basedir, archive_name))
    else:
        # ## Currently archiving entire project directory.

        # ## Uncomment following to just archive the ORIGINAL folder
        # basedir = os.path.join(basedir, FoldersConfig.original_dir)

        # ## Uncomment following to just archive the DELIVEREDfolder
        # basedir = os.path.join(basedir, FoldersConfig.delivered_dir)
        # Work from inside the project dir so the zip lands there first.
        cwd = os.getcwd()
        arcpy.AddMessage('Changeing working directory from {} to {}'.format(
            cwd, basedir))
        os.chdir(basedir)
        arcpy.AddMessage('Current working directory is {}'.format(os.getcwd()))

        # archive contents of folder basedir
        arcpy.AddMessage('archiving contents of directory {} to {}.zip'.format(
            basedir, archive_name))
        # shutil.make_archive(archive_name, 'zip', basedir)

        # allowZip64 lets the archive exceed 2 GB, which project data can.
        with zipfile.ZipFile(archive_name + '.zip',
                             "w",
                             zipfile.ZIP_DEFLATED,
                             allowZip64=True) as zf:
            for root, _, filenames in os.walk(basedir):
                for name in filenames:
                    name = os.path.join(root, name)
                    name = os.path.normpath(name)
                    # NOTE(review): doTime is called with a single argument
                    # here, unlike the (start, message) form used elsewhere
                    # in this file — confirm doTime supports this.
                    a = doTime('adding to archive {} file {}'.format(
                        archive_name, name))
                    zf.write(name, name)

        # move the file to the archive directory
        a = doTime(
            'moving zip file to archive directory {}'.format(archive_dir))
        shutil.move("{}.zip".format(archive_name), archive_dir)
        a = doTime('Moved archive {} file to {}'.format(
            archive_name, archive_dir))

    doTime(aa,
           "Operation Complete: A03 Zip project and move to archive folder")
def PrepareContoursForJob(strJobId):
    """Resolve the project for a WMX job and run the contour-preparation
    job for it (tool C02)."""
    Utility.printArguments(["WMXJobID"], [strJobId],
                           "C02 PrepareContoursForPublishing")
    start = datetime.datetime.now()

    job, prj, uid = getProjectFromWMXJobID(strJobId)
    processJob(job, prj, uid)

    doTime(start, "Operation Complete: C02 Create Contour Annotoations")
Exemple #5
0
def AddProjectToMaster(strJobId, MasterMDs_parent_path, masterService):
    """Add the WMX job's project to the master mosaic dataset (tool A08).

    Args:
        strJobId: WMX job ID used to look up the project.
        MasterMDs_parent_path: parent directory of the master MDs.
        masterService: name of the master service/MD to update.
    """
    aa = datetime.now()
    Utility.printArguments(["WMX Job ID", "masterParentDir", "masterService"],
                           [strJobId, MasterMDs_parent_path, masterService],
                           "A08 AddPrjectToMaster")

    ProjectJob, project, strUID = getProjectFromWMXJobID(
        strJobId)  # @UnusedVariable

    # BUG FIX: the original passed `master_md_name`, a name not defined
    # anywhere in this scope (guaranteed NameError at runtime). The value
    # received as `masterService` is the MD/service name this call needs.
    processJob(ProjectJob, project, strUID, MasterMDs_parent_path,
               masterService)

    doTime(aa, "Operation Complete: A06 Publish Mosaic Dataset")
def RemoveDEMErrantValues(strJobId):
    """Check out the 3D/Spatial licenses, run the errant-value removal job
    for the WMX job's project, release the licenses, and raise if any
    raster failed to process (tool A05)."""
    start = datetime.now()
    Utility.printArguments(["WMX Job ID"], [strJobId], "A05 RemoveDEMErrantValues")
    for extension in ("3D", "Spatial"):
        arcpy.CheckOutExtension(extension)

    ProjectJob, project, strUID = getProjectFromWMXJobID(strJobId)  # @UnusedVariable

    errorMsg = processJob(ProjectJob, project, strUID)

    for extension in ("3D", "Spatial"):
        arcpy.CheckInExtension(extension)
    doTime(start, "Operation Complete: A05 Remove DEM Errant Values")
    # Any accumulated per-raster errors are fatal for the overall job.
    if len(errorMsg) > 0:
        raise Exception("Failed to process {} raster data correctly".format(" ".join(errorMsg)))
Exemple #7
0
def createMosaicDataset(gdb_path, md_name, spatial_reference):
    """Create a fresh single-band 32-bit-float mosaic dataset named
    ``md_name`` inside ``gdb_path``, deleting any existing one first.

    Returns:
        The full path of the newly created mosaic dataset.
    """
    start = datetime.now()

    md_path = os.path.join(gdb_path, md_name)
    # Drop any stale copy so the create call cannot collide with it.
    deleteFileIfExists(md_path, useArcpy=True)
    # Create a MD in same SR as file
    arcpy.CreateMosaicDataset_management(in_workspace=gdb_path,
                                         in_mosaicdataset_name=md_name,
                                         coordinate_system=spatial_reference,
                                         num_bands="1",
                                         pixel_type="32_BIT_FLOAT",
                                         product_definition="NONE",
                                         product_band_definitions="#")

    doTime(start, "\tCreated MD {}".format(md_path))

    return md_path
Exemple #8
0
def PublishMosaicDataset(strJobId,
                         serverConnectionFile,
                         serverFunctionPath,
                         update=False,
                         runCount=0):
    """Publish the WMX job project's mosaic dataset as a service
    (tool A07)."""
    start = datetime.now()
    arg_names = [
        "jobID", "serverConnectionFile", "serverFunctionPath", "update",
        "runCount"
    ]
    arg_values = [
        strJobId, serverConnectionFile, serverFunctionPath, update, runCount
    ]
    Utility.printArguments(arg_names, arg_values, "A07 PublishMosaicDataset")

    ProjectJob, project, strUID = getProjectFromWMXJobID(
        strJobId)  # @UnusedVariable

    processJob(ProjectJob, project, strUID, serverConnectionFile,
               serverFunctionPath)

    doTime(start, "Operation Complete: A07 Publish Mosaic Dataset")
Exemple #9
0
def GenerateQALasDataset(strJobId,
                         createQARasters=False,
                         createMissingRasters=True,
                         overrideBorderPath=None):
    """Generate the QA LAS dataset for the WMX job's project (tool A04).

    Checks out the 3D/Spatial licenses, delegates the heavy lifting to
    processJob, best-effort compacts the project fGDB, releases the
    licenses, and raises if the project delivered no .las files.
    """
    Utility.printArguments([
        "WMXJobID", "createQARasters", "createMissingRasters",
        "overrideBorderPath"
    ], [strJobId, createQARasters, createMissingRasters, overrideBorderPath],
                           "A04 GenerateQALasDataset")

    aa = datetime.now()
    arcpy.AddMessage("Checking out licenses")
    arcpy.CheckOutExtension("3D")
    arcpy.CheckOutExtension("Spatial")

    ProjectJob, project, strUID = getProjectFromWMXJobID(
        strJobId)  # @UnusedVariable

    las_qainfo, lasd_boundary = processJob(ProjectJob, project,
                                           createQARasters,
                                           createMissingRasters,
                                           overrideBorderPath)
    try:
        # Best-effort compaction of the project fGDB; failures are ignored.
        if las_qainfo is not None and os.path.exists(las_qainfo.filegdb_path):
            arcpy.Compact_management(in_workspace=las_qainfo.filegdb_path)
    except:
        pass

        # @TODO: Move this to another standalone script
        # updateCMDR(ProjectJob, project, las_qainfo, updatedBoundary)

    arcpy.AddMessage("Checking in licenses")
    arcpy.CheckInExtension("3D")
    arcpy.CheckInExtension("Spatial")

    # NOTE(review): las_qainfo is dereferenced here without the None guard
    # used above — confirm processJob can never return None for it.
    if las_qainfo.num_las_files <= 0:
        raise Exception(
            "Project has no .las files in DELIVERED LAS_CLASSIFIED or LAS_UNCLASSIFIED folders, CANNOT CONTINUE.\nERROR: {}"
            .format(project))

    doTime(aa, "Operation Complete: A04 Generate QA LASDataset")
def CreateProjectFolders(parent_path=None, project_id=None, project_path=None):
    """Build the standard on-disk folder structure for a project and relay
    progress, warnings, and errors through arcpy (tool A01)."""
    start = datetime.now()
    Utility.printArguments(["parent_path", "project_id", "project_path"],
                           [parent_path, project_id, project_path],
                           "A01 CreateProjectFolders")

    project_folder = Project(parent_path=parent_path, projectId=project_id,
                             path=project_path)

    arcpy.AddMessage("Working on project path {}".format(project_folder.path))

    messages, errors, warnings = project_folder.make()
    # Forward each category through its matching arcpy reporting channel.
    for text in messages:
        arcpy.AddMessage(text)
    for text in warnings:
        arcpy.AddWarning(text)
    for text in errors:
        arcpy.AddError(text)
    arcpy.AddMessage("Finished creating project '{}' directory structure".format(project_folder.path))

    doTime(start, "Operation Complete: A01 Create Project Folders")
Exemple #11
0
def checkNullFields(las_footprint):
    """Convert zero values to NULL in the statistics fields of a las
    footprint feature class.

    Builds the list of base statistic fields plus one "<class>_<value>"
    combination per return/classification category, then runs
    CalculateField on each to turn 0 into NULL. Per-field failures are
    ignored because not every footprint carries every field.
    """
    a = datetime.datetime.now()
    base_fields = [FIELD_INFO[key][0] for key in (
        AREA, MAX, MEAN, MIN, RANGE, STAND_DEV, XMIN, YMIN, XMAX, YMAX)]

    class_fields = [FIELD_INFO[key][0] for key in (
        FIRST_RETURNS, SECOND_RETURNS, THIRD_RETURNS, FOURTH_RETURNS,
        SINGLE_RETURNS, FIRST_OF_MANY_RETURNS, LAST_OF_MANY_RETURNS,
        ALL_RETURNS)]
    value_fields = [FIELD_INFO[key][0] for key in (
        POINT_SPACING, MAX, MIN, RANGE)]

    # Classification-code fields c00..c17, skipping 7 and 18 (presumably
    # the LAS noise classes — confirm; 18 is unreachable with range(0, 18)
    # anyway, kept excluded for when the range is widened).
    # BUG FIX: the original used `clazz <> 7 and clazz <> 18`; `<>` is
    # Python 2-only and a syntax error on Python 3.
    for clazz in range(0, 18):
        if clazz not in (7, 18):
            class_fields.append("c{:02d}".format(clazz))
    for class_field in class_fields:
        for value_field in value_fields:
            base_fields.append("{}_{}".format(class_field, value_field))

    for field in base_fields:
        # arcpy.AddMessage("Nulling field '{}'".format(field))
        try:
            arcpy.CalculateField_management(in_table=las_footprint, field=field, expression="checkNull( !{}! )".format(field), expression_type="PYTHON_9.3", code_block="def checkNull( value ):\n   if value == 0:\n      value = None\n   return value")
        except Exception:
            # Field may not exist on this footprint; skip and continue.
            pass
    a = doTime(a, "Checked for nulls on las footprints {}".format(las_footprint))
Exemple #12
0
def createQARasterMosaicDataset(md_name, gdb_path, spatial_reference, input_folder, mxd, footprint_path=None, lasd_boundary_path=None):
    """Create a QA mosaic dataset named md_name in gdb_path and load every
    .tif raster found under input_folder into it.

    Skips creation when the MD already exists or the folder holds no .tif
    files. After loading, the MD footprint/boundary geometries are updated
    from footprint_path/lasd_boundary_path when provided. Any failure is
    reported as a warning; the function never raises.

    Returns:
        [md_path, md_name] in all cases.
    """
    #Utility.printArguments(["md_name", "gdb_path", "spatial_reference", "input_folder", "mxd", "footprint_path", "lasd_boundary_path"],
    #                       [md_name, gdb_path, spatial_reference, input_folder, mxd, footprint_path, lasd_boundary_path], "A04_C CreateQARasterMosaicDatasets")

    md_path = os.path.join(gdb_path, md_name)

    a = datetime.datetime.now()

    if arcpy.Exists(md_path):
        arcpy.AddMessage("\tMD Exists: {}".format(md_path))
    else:
        try:
            # Pre-count rasters so an empty folder never creates an empty MD.
            raster_count = 0
            #arcpy.AddMessage("\t\tLooking for rasters to add to {} in folder {}".format(md_path, input_folder))
            for root, dirs, files in os.walk(input_folder):  # @UnusedVariable
                for f in files:
                    if f.upper().endswith(".TIF"):
                        raster_count = raster_count+1

            if raster_count <=0:
                arcpy.AddMessage("\t\tNo rasters to add to {} in folder {}".format(md_path, input_folder))
            else:
                arcpy.AddMessage("\t\tLooking for rasters to add to {} in folder {}".format(md_path, input_folder))
                # Create a MD in same SR as LAS Dataset
                arcpy.CreateMosaicDataset_management(in_workspace=gdb_path,
                                                     in_mosaicdataset_name=md_name,
                                                     coordinate_system=spatial_reference,
                                                     num_bands="",
                                                     pixel_type="",
                                                     product_definition="NONE",
                                                     product_band_definitions="")

                # Apply the full standard property set for QA mosaics.
                arcpy.SetMosaicDatasetProperties_management(in_mosaic_dataset=md_path, rows_maximum_imagesize="4100", columns_maximum_imagesize="15000", allowed_compressions="None;JPEG;LZ77;LERC", default_compression_type="LERC", JPEG_quality="75", LERC_Tolerance="0.01", resampling_type="CUBIC", clip_to_footprints="NOT_CLIP", footprints_may_contain_nodata="FOOTPRINTS_DO_NOT_CONTAIN_NODATA", clip_to_boundary="CLIP", color_correction="NOT_APPLY", allowed_mensuration_capabilities="Basic", default_mensuration_capabilities="Basic", allowed_mosaic_methods="NorthWest;Center;LockRaster;ByAttribute;Nadir;Viewpoint;Seamline;None", default_mosaic_method="NorthWest", order_field="", order_base="", sorting_order="ASCENDING", mosaic_operator="FIRST", blend_width="10", view_point_x="600", view_point_y="300", max_num_per_mosaic="2000", cell_size_tolerance="0.8", cell_size="10 10", metadata_level="BASIC", transmission_fields="Name;MinPS;MaxPS;LowPS;HighPS;Tag;GroupName;ProductName;CenterX;CenterY;ZOrder;Shape_Length;Shape_Area;Thumbnail", use_time="DISABLED", start_time_field="", end_time_field="", time_format="", geographic_transform="", max_num_of_download_items="20", max_num_of_records_returned="1000", data_source_type="GENERIC", minimum_pixel_contribution="1", processing_templates="None", default_processing_template="None", time_interval="", time_interval_units="")
                a = doTime(a, "\t\tCreated MD {}".format(md_name))

                arcpy.AddRastersToMosaicDataset_management(in_mosaic_dataset=md_path,
                                                           raster_type="Raster Dataset",
                                                           input_path=input_folder,
                                                           update_cellsize_ranges="UPDATE_CELL_SIZES",
                                                           update_boundary="UPDATE_BOUNDARY",
                                                           update_overviews="NO_OVERVIEWS",
                                                           maximum_pyramid_levels="",
                                                           maximum_cell_size="0",
                                                           minimum_dimension="150",
                                                           spatial_reference="",
                                                           filter="#",
                                                           sub_folder="SUBFOLDERS",
                                                           duplicate_items_action="EXCLUDE_DUPLICATES",
                                                           build_pyramids="BUILD_PYRAMIDS",
                                                           calculate_statistics="CALCULATE_STATISTICS",
                                                           build_thumbnails="BUILD_THUMBNAILS",
                                                           operation_description="#",
                                                           force_spatial_reference="NO_FORCE_SPATIAL_REFERENCE",
                                                           estimate_statistics="ESTIMATE_STATISTICS",
                                                           aux_inputs="")

                # If nothing actually loaded, remove the now-empty MD.
                total_rows = int(arcpy.GetCount_management(md_path).getOutput(0))
                if total_rows <= 0:
                    arcpy.AddWarning("No rasters found for '{}'".format(md_path))
                    deleteFileIfExists(md_path, True)
                else:
                    try:
                        importMosaicDatasetGeometries(md_path, footprint_path, lasd_boundary_path)
                    except:
                        arcpy.AddWarning("Failed to update MD boundaries for '{}'".format(md_path))

                a = doTime(a, "\tAdded Rasters to MD {}".format(md_name))


        except:
            arcpy.AddWarning("Failed to create MD for QA Raster Layer '{}'. Please remove any locks and delete related intermediate files".format(md_path))

    return [md_path, md_name]
Exemple #13
0
def createBoundaryFeatureClass(raster_footprint, target_raster_boundary, statistics_fields="", alter_field_infos=None):
    """Derive a clean dataset boundary polygon from raster footprints.

    Pipeline (intermediates named <target>0..<target>4, each deleted once
    consumed): explode multiparts -> buffer out 10 m -> dissolve with the
    requested statistics -> eliminate small interior holes -> bend-simplify
    -> buffer back in 10 m to produce target_raster_boundary. When
    alter_field_infos is given, the summary fields are renamed and then
    joined back onto the final boundary.

    Args:
        raster_footprint: input footprint feature class.
        target_raster_boundary: output boundary feature class path.
        statistics_fields: statistics spec forwarded to Dissolve.
        alter_field_infos: optional list of (field, new_name, new_alias)
            triplets applied to the dissolved boundary.
    """
    a = datetime.datetime.now()
    aa = a
    deleteFields(raster_footprint)

    lasd_boundary_0 = "{}0".format(target_raster_boundary)
    lasd_boundary_1 = "{}1".format(target_raster_boundary)

    deleteFileIfExists(lasd_boundary_0, True)
    deleteFileIfExists(lasd_boundary_1, True)

    arcpy.AddMessage("\tMultipart to Singlepart")
    arcpy.MultipartToSinglepart_management(in_features=raster_footprint, out_feature_class=lasd_boundary_0)
    Utility.addToolMessages()
    arcpy.RepairGeometry_management(in_features=lasd_boundary_0, delete_null="DELETE_NULL")
    deleteFields(lasd_boundary_0)

    # Outward buffer closes small gaps between adjacent footprints before
    # dissolving; it is reversed by the -10 m buffer at the end.
    arcpy.AddMessage("\tBuffering")
    arcpy.Buffer_analysis(in_features=lasd_boundary_0, out_feature_class=lasd_boundary_1, buffer_distance_or_field="10 Meters", line_side="FULL", line_end_type="ROUND", dissolve_option="NONE", method="PLANAR")
    Utility.addToolMessages()
    arcpy.RepairGeometry_management(in_features=lasd_boundary_1, delete_null="DELETE_NULL")
    deleteFields(lasd_boundary_1)

    deleteFileIfExists(lasd_boundary_0, True)

    lasd_boundary_2 = "{}2".format(target_raster_boundary)
    deleteFileIfExists(lasd_boundary_2, True)
    arcpy.AddMessage("\tDissolving with statistics: {}".format(statistics_fields))
    arcpy.Dissolve_management(
        in_features=lasd_boundary_1,
        out_feature_class=lasd_boundary_2,
        statistics_fields=statistics_fields
        )
    Utility.addToolMessages()
    arcpy.RepairGeometry_management(in_features=lasd_boundary_2, delete_null="DELETE_NULL")
    deleteFields(lasd_boundary_2)
    a = doTime(a, "\tDissolved to {}".format(lasd_boundary_2))


    if alter_field_infos is not None:
        # Rename summary fields; per-field failures are tolerated.
        for alter_field_info in alter_field_infos:
            try:
                alterField(lasd_boundary_2, alter_field_info[0], alter_field_info[1], alter_field_info[2])
            except:
                pass

        a = doTime(a, "\tRenamed summary fields")

    # Remove interior holes fully contained in the boundary.
    lasd_boundary_3 = "{}3".format(target_raster_boundary)
    deleteFileIfExists(lasd_boundary_3, True)
    arcpy.EliminatePolygonPart_management(in_features=lasd_boundary_2, out_feature_class=lasd_boundary_3, condition="AREA", part_area="10000 SquareMiles", part_area_percent="0", part_option="CONTAINED_ONLY")
    arcpy.RepairGeometry_management(in_features=lasd_boundary_3, delete_null="DELETE_NULL")
    deleteFileIfExists(lasd_boundary_1, True)
    deleteFields(lasd_boundary_3)
    lasd_boundary_4 = "{}4".format(target_raster_boundary)
    deleteFileIfExists(lasd_boundary_4, True)
    arcpy.SimplifyPolygon_cartography(in_features=lasd_boundary_3, out_feature_class=lasd_boundary_4, algorithm="BEND_SIMPLIFY", tolerance="20 Meters", minimum_area="0 Unknown", error_option="RESOLVE_ERRORS", collapsed_point_option="NO_KEEP", in_barriers="")
    arcpy.RepairGeometry_management(in_features=lasd_boundary_4, delete_null="DELETE_NULL")
    deleteFields(lasd_boundary_4)
    #try:
    #    arcpy.DeleteField_management(in_table=lasd_boundary_4, drop_field="Id;ORIG_FID;InPoly_FID;SimPgnFlag;MaxSimpTol;MinSimpTol")
    #except:
    #    pass
    deleteFileIfExists(lasd_boundary_3, True)

    # Inward buffer undoes the earlier +10 m expansion.
    deleteFileIfExists(target_raster_boundary, True)
    arcpy.Buffer_analysis(in_features=lasd_boundary_4, out_feature_class=target_raster_boundary, buffer_distance_or_field="-10 Meters", line_side="FULL", line_end_type="ROUND", dissolve_option="ALL", method="PLANAR")
    arcpy.RepairGeometry_management(in_features=target_raster_boundary, delete_null="DELETE_NULL")
    deleteFields(target_raster_boundary)
    deleteFileIfExists(lasd_boundary_4, True)

    if alter_field_infos is not None and len(alter_field_infos) > 0:
        # Join the renamed summary fields onto the final boundary.
        fields = ";".join([field[1] for field in alter_field_infos])
        arcpy.JoinField_management(in_data=target_raster_boundary, in_field="OBJECTID", join_table=lasd_boundary_2, join_field="OBJECTID", fields=fields)
        Utility.addToolMessages()

    deleteFileIfExists(lasd_boundary_2, True)

    a = doTime(aa, "Dissolved las footprints to dataset boundary {} ".format(target_raster_boundary))
Exemple #14
0
def processJob(ProjectJob,
               project,
               createQARasters=False,
               createMissingRasters=True,
               overrideBorderPath=None):
    """Run the A04 QA pipeline for one project: validate the delivered LAS
    spatial reference, compute per-file statistics, build the LAS dataset,
    derive footprints/boundary, and assemble QA raster mosaics into an MXD.

    Returns:
        (las_qainfo, lasd_boundary) — lasd_boundary is None when the
        project had no .las files or validation failed early.
    """
    aaa = datetime.now()
    a = aaa
    lasd_boundary = None

    ProjectFolder = ProjectFolders.getProjectFolderFromDBRow(
        ProjectJob, project)
    ProjectID = ProjectJob.getProjectID(project)
    ProjectUID = ProjectJob.getUID(project)

    target_path = ProjectFolder.derived.path

    # Get the LAS QA Info to determine if it is classified or not
    las_qainfo = getLasQAInfo(ProjectFolder)
    if las_qainfo.num_las_files <= 0:
        arcpy.AddError(
            "Project with Job ID {} has no .las files in DELIVERED LAS_CLASSIFIED or LAS_UNCLASSIFIED folders, CANNOT CONTINUE."
            .format(ProjectFolder.projectId))
    else:
        ProjectFolders.createAnalysisFolders(target_path,
                                             las_qainfo.isClassified)

        # Make the STAT folder if it doesn't already exist

        stat_out_folder = ProjectFolder.derived.stats_path
        if not os.path.exists(stat_out_folder):
            os.makedirs(stat_out_folder)
            arcpy.AddMessage(
                "created Derived STAT folder '{}'".format(stat_out_folder))
        else:
            arcpy.AddMessage("STAT folder '{}'".format(stat_out_folder))

        # Make the scratch file GDB for the project
        if not os.path.exists(las_qainfo.filegdb_path):
            arcpy.CreateFileGDB_management(target_path,
                                           las_qainfo.filegdb_name)
            Utility.addToolMessages()
        else:
            arcpy.AddMessage(
                "Derived fGDB sand box already exists. Using '{}'".format(
                    las_qainfo.filegdb_path))

        # Validate the spatial reference found in the PRJ/LAS files.
        las_qainfo.lasd_spatial_ref = checkSpatialOnLas(
            las_qainfo.las_directory, target_path, createQARasters,
            las_qainfo.isClassified)

        if las_qainfo.lasd_spatial_ref is None:
            arcpy.AddError(
                "ERROR:   Neither spatial reference in PRJ or LAS files are valid CANNOT CONTINUE."
            )
            arcpy.AddError(
                "ERROR:   Please create a projection file (.prj) in the LAS folder using the '3D Analyst Tools/Conversion/From File/Point File Information' tool."
            )

        elif not las_qainfo.isValidSpatialReference():
            # Non-standard SR: clear it so the hard stop below triggers.
            las_qainfo.lasd_spatial_ref = None
            arcpy.AddError(
                "ERROR: Spatial Reference for the las files is not standard (see above)"
            )
            arcpy.AddError(
                "ERROR: Please create a projection file (.prj) in the LAS folder using the '3D Analyst Tools/Conversion/From File/Point File Information' tool."
            )
            try:
                arcpy.AddError("ERROR: '{}'".format(
                    Utility.getSpatialReferenceInfo(
                        las_qainfo.lasd_spatial_ref)))
            except:
                pass

        elif las_qainfo.isUnknownSpatialReference():
            # Unknown SR: same treatment as non-standard above.
            las_qainfo.lasd_spatial_ref = None
            arcpy.AddError(
                "ERROR: Please provide a projection file (.prj) that provides a valid transformation in the LAS directory."
            )
            arcpy.AddError(
                "ERROR:   Please create a projection file (.prj) in the LAS folder using the '3D Analyst Tools/Conversion/From File/Point File Information' tool."
            )
            arcpy.AddError(
                "ERROR: Spatial Reference for the las files is not standard")
            try:
                arcpy.AddError("ERROR: '{}'".format(
                    Utility.getSpatialReferenceInfo(
                        las_qainfo.lasd_spatial_ref)))
            except:
                pass

        if las_qainfo.lasd_spatial_ref is None:
            raise Exception(
                "Error: Spatial Reference is invalid, unknown, or not specified."
            )
        else:
            #         prj_Count, prj_File = Utility.fileCounter(las_qainfo.las_directory, '.prj')
            #         if prj_Count > 0 and prj_File is not None and len(str(prj_File)) > 0:
            #             prj_spatial_ref = os.path.join(las_qainfo.las_directory, prj_File)
            #
            #         if prj_Count > 0:
            #             las_qainfo.setProjectionFile(prj_File)
            #             las_spatial_ref = os.path.join(las_qainfo.las_directory, prj_File)
            #             arcpy.AddMessage("Found a projection file with the las files, OVERRIDE LAS SR (if set) '{}'".format(las_spatial_ref))
            #             arcpy.AddMessage(Utility.getSpatialReferenceInfo(las_qainfo.getSpatialReference()))
            #         else:
            #             arcpy.AddMessage("Using projection (coordinate system) from las files if available.")

            # Per-file statistics (and QA rasters when requested).
            fileList = getLasFileProcessList(las_qainfo.las_directory,
                                             target_path, createQARasters,
                                             las_qainfo.isClassified)
            createLasStatistics(fileList, target_path,
                                las_qainfo.lasd_spatial_ref,
                                las_qainfo.isClassified, createQARasters,
                                createMissingRasters, overrideBorderPath)

            # Create the project's las dataset. Don't do this before you validated that each .las file has a .lasx
            if os.path.exists(las_qainfo.las_dataset_path):
                arcpy.AddMessage("Using existing LAS Dataset {}".format(
                    las_qainfo.las_dataset_path))
                # arcpy.AddMessage("Deleting existing LAS Dataset {}".format(las_qainfo.las_dataset_path))
                # arcpy.Delete_management(las_qainfo.las_dataset_path)
            else:
                a = datetime.now()
                # note: don't use method in A04_B because we don't want to compute statistics this time
                arcpy.CreateLasDataset_management(
                    input=las_qainfo.las_directory,
                    out_las_dataset=las_qainfo.las_dataset_path,
                    folder_recursion="RECURSION",
                    in_surface_constraints="",
                    spatial_reference=las_qainfo.lasd_spatial_ref,
                    compute_stats="NO_COMPUTE_STATS",
                    relative_paths="RELATIVE_PATHS",
                    create_las_prj="FILES_MISSING_PROJECTION")
                Utility.addToolMessages()
                a = doTime(
                    a, "Created LAS Dataset '{}'".format(
                        las_qainfo.las_dataset_path))

            desc = arcpy.Describe(las_qainfo.las_dataset_path)

            # las_qainfo.lasd_spatial_ref = desc.SpatialReference
            las_qainfo.LASDatasetPointCount = desc.pointCount
            las_qainfo.LASDatasetFileCount = desc.fileCount
            arcpy.AddMessage(
                "LASDatasetPointCount {} and LASDatasetFileCount {}".format(
                    desc.pointCount, desc.fileCount))

            lasd_boundary, las_footprint = A04_C_ConsolidateLASInfo.createRasterBoundaryAndFootprints(
                las_qainfo.filegdb_path, target_path, ProjectID,
                ProjectFolder.path, ProjectUID)

            mxd = createMXD(las_qainfo, target_path, ProjectID)

            # if createQARasters:
            arcpy.AddMessage("Creating QA raster mosaics")
            mosaics = A04_C_ConsolidateLASInfo.createQARasterMosaics(
                las_qainfo.isClassified, las_qainfo.filegdb_path,
                las_qainfo.lasd_spatial_ref, target_path, mxd, las_footprint,
                lasd_boundary)
            if mxd is not None:
                a = datetime.now()
                try:
                    # Add each QA mosaic to the MXD; every step is
                    # best-effort since the MXD may be open/locked.
                    mxd_path = mxd.filePath
                    for [md_path, md_name] in mosaics:
                        arcpy.AddMessage(
                            "Adding QA raster mosaic {} to mxd {}".format(
                                md_path, mxd_path))
                        try:
                            if not arcpy.Exists(md_path):
                                a = doTime(
                                    a,
                                    "\tMD doesn't exist {}. Can't add to MXD {}. Is it open?"
                                    .format(md_path, mxd_path))
                            else:
                                df = mxd.activeDataFrame
                                if isLayerExist(mxd, df, md_name):
                                    a = doTime(
                                        a, "\t MD {} already exists in MXD {}".
                                        format(md_name, mxd_path))
                                else:
                                    if len(str(md_name)) > 0:
                                        try:
                                            lyr_md = arcpy.MakeMosaicLayer_management(
                                                in_mosaic_dataset=md_path,
                                                out_mosaic_layer=md_name
                                            ).getOutput(0)
                                            df = mxd.activeDataFrame
                                            arcpy.mapping.AddLayer(
                                                df, lyr_md, 'BOTTOM')
                                            # lyr_md.visible = False
                                            mxd.save()
                                            a = doTime(
                                                a,
                                                "\tAdded MD {} to MXD {} as {}"
                                                .format(
                                                    md_name, mxd_path, lyr_md))
                                        except:
                                            a = doTime(
                                                a,
                                                "\tfailed to add MD {} to MXD {}. Is it open?"
                                                .format(md_path, mxd_path))

                        except:
                            try:
                                a = doTime(
                                    a,
                                    "\tfailed to add MD to MXD {}. Is it open?"
                                    .format(mxd_path))
                            except:
                                pass

                    mxd.save()
                except:
                    try:
                        a = doTime(
                            a, "\tfailed to save MXD {}. Is it open?".format(
                                mxd_path))
                    except:
                        pass

    bbb = datetime.now()
    td = (bbb - aaa).total_seconds()
    arcpy.AddMessage("Completed {} in {}".format(las_qainfo.las_dataset_path,
                                                 td))

    return las_qainfo, lasd_boundary
Exemple #15
0
def createLasStatistics(fileList,
                        target_path,
                        spatial_reference=None,
                        isClassified=True,
                        createQARasters=False,
                        createMissingRasters=True,
                        overrideBorderPath=None,
                        runAgain=True):
    """
    Fan out A04_B_CreateLASStats.py over groups of .las files in parallel
    subprocesses, throttled to the machine's processor count.

    fileList             -- list of .las file paths to process
    target_path          -- derived-products folder passed to the tool
    spatial_reference    -- optional spatial reference override
    isClassified         -- whether the LAS data is classified
    createQARasters      -- whether to build QA rasters
    createMissingRasters -- whether to build missing rasters
    overrideBorderPath   -- optional boundary override passed to the tool
    runAgain             -- when True, re-run once (with smaller groups)
                            if any group's subprocess exited non-zero

    Raises Exception when groups still fail after the retry pass.
    """
    a = datetime.now()
    path = os.path.join(RunUtil.TOOLS_PATH, "A04_B_CreateLASStats.py")
    Utility.printArguments([
        "fileList", "target_path", "spatial_reference", "isClassified",
        "createQARasters", "createMissingRasters", "overrideBorderPath"
    ], [
        fileList, target_path, spatial_reference, isClassified,
        createQARasters, createMissingRasters, overrideBorderPath
    ], "createLasStatistics")

    # Retry pass uses smaller groups so failures re-run in finer chunks.
    grouping = PROCESS_CHUNKS
    if not runAgain:
        grouping = int(PROCESS_CHUNKS / 2)
    if grouping <= 1:
        grouping = 2

    total = len(fileList)
    if total > 0:

        # Comma-joined group paths whose subprocess returned non-zero.
        fileList_repeat = []

        procCount = int(os.environ['NUMBER_OF_PROCESSORS'])
        if procCount > 4:
            procCount = procCount - PROCESS_SPARES
        arcpy.AddMessage(
            "\tUsing {}/{} Processors to process {} files in groups of {}".
            format(procCount, (procCount + PROCESS_SPARES), total, grouping))
        # Each entry is [process, log, group_path]; carrying the group's own
        # path lets a failed group be recorded correctly (the previous code
        # always recorded the most recently spawned group's path).
        processList = []

        indx = 0
        for f_paths in Utility.grouper(fileList, grouping):
            f_paths = [x for x in f_paths if x is not None]
            f_path = ",".join(f_paths)
            indx = indx + len(f_paths)

            arcpy.AddMessage('\t Working on {}/{}'.format(indx, total))
            args = [
                f_path, target_path, spatial_reference,
                "{}".format(isClassified), "{}".format(createQARasters),
                "{}".format(createMissingRasters), overrideBorderPath
            ]

            try:
                proc_info = RunUtil.runToolx64_async(path, args, "A04_B",
                                                     target_path)
                processList.append([proc_info[0], proc_info[1], f_path])
                # give time for things to wake up
                time.sleep(PROCESS_DELAY)
            except:
                # sys.exc_type/sys.exc_value are long-deprecated and not
                # thread-safe; use sys.exc_info() instead.
                exc_type, exc_value, tb = sys.exc_info()
                tbinfo = traceback.format_tb(tb)[0]
                pymsg = " PYTHON ERRORS:\nTraceback Info:\n" + tbinfo + "\nError Info:\n    " + \
                        str(exc_type) + ": " + str(exc_value) + "\n"
                arcpy.AddWarning(pymsg)
                msgs = "GP ERRORS:\n" + arcpy.GetMessages(2) + "\n"
                arcpy.AddWarning(msgs)
                sys.exit(1)

            # Throttle: reap finished subprocesses, and block while the
            # number still running is at the processor limit.
            waitForResults = True
            first = True
            while waitForResults:
                if not first:
                    time.sleep(1)
                first = False
                # Rebuild the list instead of deleting while enumerating;
                # `del processList[i]` inside the loop skipped entries.
                still_running = []
                for p, l, grp_path in processList:
                    if p.poll() is None:
                        still_running.append([p, l, grp_path])
                    else:
                        # error log messages are handled in endRun_async
                        retCode = RunUtil.endRun_async(path, p, l)
                        if retCode != 0:
                            fileList_repeat.append(grp_path)
                processList = still_running

                waitForResults = (len(processList) >= int(procCount))

        # Wait for last subprocesses to complete
        arcpy.AddMessage("\tWaiting for process list to clear {} jobs".format(
            len(processList)))
        while len(processList) > 0:
            still_running = []
            for p, l, grp_path in processList:
                if p.poll() is None:
                    still_running.append([p, l, grp_path])
                else:
                    retCode = RunUtil.endRun_async(path, p, l)
                    if retCode != 0:
                        fileList_repeat.append(grp_path)
            processList = still_running
            if len(processList) > 0:
                arcpy.AddMessage(
                    "\tWaiting for process list to clear {} jobs".format(
                        len(processList)))
                time.sleep(PROCESS_DELAY)

        if runAgain and len(fileList_repeat) > 0:
            # Retry once over the full file list (the tool skips work that
            # already completed) to clean up any errors along the way.
            createLasStatistics(fileList,
                                target_path,
                                spatial_reference,
                                isClassified,
                                createQARasters,
                                createMissingRasters,
                                overrideBorderPath,
                                runAgain=False)
        elif not runAgain and len(fileList_repeat) > 0:
            arcpy.AddError("Error processing .las files.")
            raise Exception("Error processing .las files.")

        doTime(a, 'createLasStatistics: All jobs completed.')
def createBoundaryFeatureClass(raster_footprint,
                               target_raster_boundary,
                               statistics_fields="",
                               alter_field_infos=None):
    """
    Derive a clean dataset boundary polygon from raster footprints.

    The footprints are buffered out by 10 meters, dissolved with the
    requested statistics, cleaned of interior holes, simplified, and
    buffered back in by 10 meters; any renamed statistics fields are then
    joined back onto the final boundary.

    raster_footprint       -- input footprint feature class
    target_raster_boundary -- output boundary feature class path
    statistics_fields      -- "<field> <op>;..." string for Dissolve
    alter_field_infos      -- optional field rename specs; item [1] is the
                              renamed field to join back onto the result
    """
    timer = datetime.datetime.now()
    start_time = timer

    def _finish_step(fc):
        # Every intermediate gets its geometry repaired and scratch
        # fields removed before the next tool consumes it.
        arcpy.RepairGeometry_management(in_features=fc,
                                        delete_null="DELETE_NULL")
        deleteFields(fc)

    step1 = "{}1".format(target_raster_boundary)
    deleteFileIfExists(step1, True)
    arcpy.Buffer_analysis(in_features=raster_footprint,
                          out_feature_class=step1,
                          buffer_distance_or_field="10 Meters",
                          line_side="FULL",
                          line_end_type="ROUND",
                          dissolve_option="NONE",
                          method="PLANAR")
    _finish_step(step1)
    timer = doTime(timer, "\tBuffer out into {}".format(step1))

    step2 = "{}2".format(target_raster_boundary)
    deleteFileIfExists(step2, True)
    arcpy.AddMessage(
        "\tDissolving with statistics: {}".format(statistics_fields))
    arcpy.Dissolve_management(in_features=step1,
                              out_feature_class=step2,
                              dissolve_field=FIELD_INFO[ELEV_TYPE][0],
                              statistics_fields=statistics_fields)
    _finish_step(step2)
    timer = doTime(timer, "\tDissolved to {}".format(step2))

    deleteFileIfExists(step1, True)

    alterFields(alter_field_infos, step2)
    timer = doTime(timer, "\tAltered Fields on {}".format(step2))

    step3 = "{}3".format(target_raster_boundary)
    deleteFileIfExists(step3, True)
    arcpy.EliminatePolygonPart_management(in_features=step2,
                                          out_feature_class=step3,
                                          condition="AREA",
                                          part_area="10000 SquareMiles",
                                          part_area_percent="0",
                                          part_option="CONTAINED_ONLY")
    _finish_step(step3)
    timer = doTime(timer,
                   "\tEliminated internal parts on {}".format(step3))

    # step2 is intentionally kept alive: its statistics fields are joined
    # back onto the final boundary below.

    # JWS 4/26 - Bend Simplify -> Point Remove & 20 Meters -> 0.1 Meters
    step4 = "{}4".format(target_raster_boundary)
    deleteFileIfExists(step4, True)
    arcpy.SimplifyPolygon_cartography(in_features=step3,
                                      out_feature_class=step4,
                                      algorithm="POINT_REMOVE",
                                      tolerance="0.1 Meters",
                                      minimum_area="0 Unknown",
                                      error_option="RESOLVE_ERRORS",
                                      collapsed_point_option="NO_KEEP",
                                      in_barriers="")
    _finish_step(step4)
    timer = doTime(timer, "\tSimplified to {}".format(step4))

    deleteFileIfExists(step3, True)

    deleteFileIfExists(target_raster_boundary, True)
    arcpy.Buffer_analysis(in_features=step4,
                          out_feature_class=target_raster_boundary,
                          buffer_distance_or_field="-10 Meters",
                          line_side="FULL",
                          line_end_type="ROUND",
                          dissolve_option="NONE",
                          method="PLANAR")
    _finish_step(target_raster_boundary)
    timer = doTime(timer,
                   "\tBuffer back into {}".format(target_raster_boundary))

    deleteFileIfExists(step4, True)

    if alter_field_infos is not None and len(alter_field_infos) > 0:
        join_fields = ";".join([info[1] for info in alter_field_infos])
        arcpy.JoinField_management(in_data=target_raster_boundary,
                                   in_field="OBJECTID",
                                   join_table=step2,
                                   join_field="OBJECTID",
                                   fields=join_fields)
        timer = doTime(
            timer, "\tJoined {} with {}".format(target_raster_boundary,
                                                step2))

    deleteFileIfExists(step2, True)

    timer = doTime(
        start_time, "Dissolved raster footprints to dataset boundary {} ".format(
            target_raster_boundary))
def processJob(ProjectJob, project, ProjectUID):
    """
    Re-value and publish derived rasters for each elevation type (DTM, DSM,
    DLM, INT), then merge their footprints and boundaries into project-wide
    feature classes and register them in the project MXD.

    ProjectJob -- project job wrapper providing folder/ID/UID lookups
    project    -- project database row being processed
    ProjectUID -- NOTE(review): discarded on entry and re-read from the
                  project row below

    Returns a list of elevation-type names whose spatial reference failed
    to validate (empty list on full success).
    """

    a = datetime.now()
    aa = a
    aaa = aa

    ProjectFolder = ProjectFolders.getProjectFolderFromDBRow(
        ProjectJob, project)
    ProjectID = ProjectJob.getProjectID(project)
    # The incoming ProjectUID argument is overwritten here.
    ProjectUID = ProjectJob.getUID(project)

    elev_types = [DTM, DSM, DLM, INT]
    target_path = ProjectFolder.derived.path
    publish_path = ProjectFolder.published.path
    fgdb_path = ProjectFolder.derived.fgdb_path

    lasd_boundary = A04_C_ConsolidateLASInfo.getLasdBoundaryPath(fgdb_path)
    raster_footprints, raster_boundaries = [], []

    raster_footprint_main = A05_C_ConsolidateRasterInfo.getRasterFootprintPath(
        fgdb_path)
    raster_boundary_main = A05_C_ConsolidateRasterInfo.getRasterBoundaryPath(
        fgdb_path)

    # Per-elevation-type flag: True when spatial reference validation failed.
    spatialRef_error = {}
    # Z range and spatial-reference metadata read from the LAS boundary.
    z_min, z_max, v_name, v_unit, h_name, h_unit, h_wkid, is_classified = getLasdBoundData(
        lasd_boundary)  # @UnusedVariable
    arcpy.AddMessage('TRACKING')
    arcpy.AddMessage(v_name)

    # Explicitely check if DTM images exist. If not bail
    start_dir = os.path.join(ProjectFolder.delivered.path, DTM)
    f_name = getFileProcessList(start_dir,
                                DTM,
                                target_path,
                                publish_path,
                                return_first=True,
                                check_sr=False)
    if f_name is None:
        arcpy.AddError(
            "No DTM Files Found in {} folder, cannot proceed.".format(
                start_dir))
        raise Exception("No DTM Files Found, cannot proceed.")

    for elev_type in elev_types:

        spatialRef_error[elev_type] = False
        start_dir = os.path.join(ProjectFolder.delivered.path, elev_type)
        arcpy.AddMessage('### START DIR {}'.format(start_dir))
        f_name = getFileProcessList(start_dir,
                                    elev_type,
                                    target_path,
                                    publish_path,
                                    return_first=True,
                                    check_sr=False)
        if f_name is None:
            # Nothing delivered for this type: fall back to rasters derived
            # from the LAS data earlier in the pipeline.
            arcpy.AddMessage(
                "Trying DERIVED source. No {} rasters found to re-value in {}."
                .format(elev_type, start_dir))
            if elev_type == DSM:
                start_dir = os.path.join(ProjectFolder.derived.path,
                                         "ELEVATION", "FIRST")
            elif elev_type == DLM:
                start_dir = os.path.join(ProjectFolder.derived.path,
                                         "ELEVATION", "ALAST")
            elif elev_type == INT:
                start_dir = os.path.join(ProjectFolder.derived.path,
                                         "INTENSITY", "FIRST")
            f_name = getFileProcessList(start_dir,
                                        elev_type,
                                        target_path,
                                        publish_path,
                                        return_first=True,
                                        check_sr=False)

        if f_name is None:
            arcpy.AddWarning("No {} rasters found to re-value in {}".format(
                elev_type, start_dir))
        else:
            spatial_ref = validateRasterSpaitialRef(ProjectFolder, start_dir,
                                                    elev_type, target_path,
                                                    v_name, v_unit, h_name,
                                                    h_unit, h_wkid)

            if spatial_ref is None:
                # Invalid/missing spatial reference: record the failure and
                # skip processing this elevation type.
                spatialRef_error[elev_type] = True
            else:
                spatialRef_error[elev_type] = False
                fileList = getFileProcessList(start_dir, elev_type,
                                              target_path, publish_path)
                processRastersInFolder(fileList, target_path, publish_path,
                                       elev_type, lasd_boundary, z_min, z_max,
                                       v_name, v_unit, h_name, h_unit, h_wkid,
                                       spatial_ref)
                raster_footprint, raster_boundary = A05_C_ConsolidateRasterInfo.createRasterBoundaryAndFootprints(
                    fgdb_path, target_path, ProjectID, ProjectFolder.path,
                    ProjectUID, elev_type)
                if raster_footprint is not None:
                    raster_footprints.append(raster_footprint)
                    arcpy.RepairGeometry_management(
                        in_features=raster_footprint, delete_null="KEEP_NULL")
                if raster_boundary is not None:
                    raster_boundaries.append(raster_boundary)
                    arcpy.RepairGeometry_management(
                        in_features=raster_boundary, delete_null="KEEP_NULL")

        a = doTime(
            a,
            'COMPLETED: Finished processing {}\n---------------------------------------\n\n'
            .format(elev_type))

    aa = doTime(aa, 'COMPLETED: Finished processing all elevation types')

    # Merge the per-type footprints into the project-wide footprint class.
    if arcpy.Exists(raster_footprint_main):
        A05_C_ConsolidateRasterInfo.deleteFileIfExists(raster_footprint_main,
                                                       True)
    if len(raster_footprints) > 0:
        arcpy.Merge_management(inputs=raster_footprints,
                               output=raster_footprint_main)
        arcpy.AddMessage("Merged raster footprints {} to {}".format(
            raster_footprints, raster_footprint_main))
#         for raster_footprint in raster_footprints:
#             try:
#                 A05_C_ConsolidateRasterInfo.deleteFileIfExists(raster_footprint, True)
#             except:
#                 pass
    arcpy.RepairGeometry_management(in_features=raster_footprint_main,
                                    delete_null="KEEP_NULL")

    # Merge the per-type boundaries into the project-wide boundary class.
    if arcpy.Exists(raster_boundary_main):
        A05_C_ConsolidateRasterInfo.deleteFileIfExists(raster_boundary_main,
                                                       True)
    if len(raster_boundaries) > 0:
        arcpy.Merge_management(inputs=raster_boundaries,
                               output=raster_boundary_main)
        arcpy.AddMessage("Merged raster boundaries {} to {}".format(
            raster_boundaries, raster_boundary_main))
#         for raster_boundary in raster_boundaries:
#             try:
#                 A05_C_ConsolidateRasterInfo.deleteFileIfExists(raster_boundary, True)
#             except:
#                 pass

    arcpy.RepairGeometry_management(in_features=raster_boundary_main,
                                    delete_null="KEEP_NULL")

    # Best-effort: add boundary/footprint layers to the project MXD,
    # creating it from a blank template if necessary. Failures (e.g. the
    # MXD is open in ArcMap) are swallowed.
    try:
        out_map_file_path = os.path.join(target_path,
                                         "{}.mxd".format(ProjectID))
        if not os.path.exists(out_map_file_path):
            mxd = arcpy.mapping.MapDocument(r"./blank.mxd")
            mxd.saveACopy(out_map_file_path)

        mxd = arcpy.mapping.MapDocument(out_map_file_path)
        mxd.relativePaths = True
        mxd_path = mxd.filePath
        if mxd is not None:
            df = mxd.activeDataFrame
            if not A04_A_GenerateQALasDataset.isLayerExist(
                    mxd, df, "Raster Boundary"):
                lyr_footprint = arcpy.MakeFeatureLayer_management(
                    raster_boundary_main, "Raster Boundary").getOutput(0)
                arcpy.mapping.AddLayer(df, lyr_footprint, 'TOP')
                arcpy.AddMessage("\tAdded MD {} to MXD {}.".format(
                    "Raster Boundary", mxd_path))

            if not A04_A_GenerateQALasDataset.isLayerExist(
                    mxd, df, "Raster Footprints"):
                lyr_footprint = arcpy.MakeFeatureLayer_management(
                    raster_footprint_main, "Raster Footprints").getOutput(0)
                arcpy.mapping.AddLayer(df, lyr_footprint, 'TOP')
                arcpy.AddMessage("\tAdded MD {} to MXD {}.".format(
                    "Raster Footprints", mxd_path))

            mxd.save()
    except:
        pass

    # Collect the elevation types that failed spatial-ref validation.
    errorMsg = []
    for elev_type in spatialRef_error.keys():
        if spatialRef_error[elev_type]:
            arcpy.AddError("Failed to process {} correctly".format(elev_type))
            errorMsg.append(elev_type)

    # Best-effort compact of the file geodatabase.
    try:
        if fgdb_path is not None and os.path.exists(fgdb_path):
            arcpy.Compact_management(in_workspace=fgdb_path)
    except:
        pass

    # JWS 6/8/18 - Push Derived FGDB Metadata to CMDR Geodatabase
    try:
        A05_D_UpdateCMDRMetadata.copy_metadata(fgdb_path, SDE_CMDR_FILE_PATH)
    except Exception as e:
        arcpy.AddMessage('Exception Pushing FGDB Metadata: {}'.format(e))

    aa = doTime(
        aa, 'COMPLETED: Finished merging raster footprints and boundaries')
    doTime(aaa, 'COMPLETED: A05_A Completed')
    return errorMsg
def getStatsFields(feature_class=None):
    """
    Build the Dissolve statistics-field string and the matching list of
    field-rename specs used to summarize raster footprint attributes.

    feature_class -- optional feature class; when supplied, any summary
                     source field missing from it is added via
                     addFieldIfMissing before being summarized.

    Returns (summary_string, field_alter):
      summary_string -- semicolon-delimited "<field> <operation>" string
                        for Dissolve_management
      field_alter    -- list of [stat_field_name, new_name, new_alias]
                        specs to rename Dissolve's "<OP>_<field>" outputs
                        back to their original names.
    """
    a = datetime.datetime.now()
    # [FIELD_INFO entry, statistic operation] pairs; each FIELD_INFO entry
    # is indexed as [field_name, field_alias, ...].
    base_fields = [[FIELD_INFO[PATH], "FIRST"], [FIELD_INFO[NAME], "COUNT"],
                   [FIELD_INFO[V_NAME],
                    "FIRST"], [FIELD_INFO[V_UNIT], "FIRST"],
                   [FIELD_INFO[H_NAME],
                    "FIRST"], [FIELD_INFO[H_UNIT], "FIRST"],
                   [FIELD_INFO[H_WKID], "FIRST"],
                   [FIELD_INFO[NODATA_VALUE], "FIRST"],
                   [FIELD_INFO[AREA], "SUM"], [FIELD_INFO[ELEV_TYPE], "FIRST"],
                   [FIELD_INFO[MAX], "MAX"], [FIELD_INFO[MEAN], "MEAN"],
                   [FIELD_INFO[MIN], "MIN"], [FIELD_INFO[RANGE], "MAX"],
                   [FIELD_INFO[STAND_DEV], "MEAN"], [FIELD_INFO[XMIN], "MIN"],
                   [FIELD_INFO[YMIN], "MIN"], [FIELD_INFO[XMAX], "MAX"],
                   [FIELD_INFO[YMAX], "MAX"], [FIELD_INFO[WIDTH], "MEAN"],
                   [FIELD_INFO[HEIGHT], "MEAN"],
                   [FIELD_INFO[MEAN_CELL_WIDTH], "MEAN"],
                   [FIELD_INFO[MEAN_CELL_HEIGHT], "MEAN"],
                   [FIELD_INFO[BAND_COUNT], "MAX"],
                   [FIELD_INFO[FORMAT],
                    "FIRST"], [FIELD_INFO[HAS_RAT], "FIRST"],
                   [FIELD_INFO[IS_INT], "FIRST"],
                   [FIELD_INFO[IS_TEMP], "FIRST"],
                   [FIELD_INFO[PIXEL_TYPE], "FIRST"],
                   [FIELD_INFO[UNCOMP_SIZE], "MEAN"]]

    # @TODO: Delete fields that don't exist in a given feature class?

    field_alter = []
    for field_info, stat_op in base_fields:
        # Dissolve names its outputs "<OP>_<field>"; rename each back to
        # the original field name and alias.
        stat_field_name = "{}_{}".format(stat_op, field_info[0])
        new_field = [stat_field_name, field_info[0], field_info[1]]

        if new_field[0] == "COUNT_name":
            # Keep the raw COUNT field name, but give it a friendly alias.
            new_field = [stat_field_name, stat_field_name,
                         "Number of Raster Files"]
        field_alter.append(new_field)

    existing_fieldnames = None
    if feature_class is not None:
        existing_fieldnames = [
            field.name for field in arcpy.ListFields(feature_class)
        ]

    summary_fields = []
    for field_info, stat_op in base_fields:
        # Only add fields when a feature class was provided and the field
        # is absent from it.
        if (existing_fieldnames is not None
                and field_info[0] not in existing_fieldnames):
            arcpy.AddMessage("Adding field {} to {}".format(
                field_info, feature_class))
            addFieldIfMissing(feature_class, existing_fieldnames, field_info)

        summary_fields.append("{} {}".format(field_info[0], stat_op))

    summary_string = ";".join(summary_fields)

    a = doTime(a, "Summary String")
    return summary_string, field_alter
Example #19
0
    sdPath = sys.argv[
        2]  #.replace('aiotxftw6na01data', 'aiotxftw6na01') #Added replace method 22 Mar 2019 BJN
    arcpy.AddMessage('sdPath = ' + sdPath)
    serverConnectionFile = sys.argv[
        3]  #.replace('aiotxftw6na01data', 'aiotxftw6na01') #Added replace method 22 Mar 2019 BJN
    arcpy.AddMessage('serverConnectionFile = ' + serverConnectionFile)
    if len(sys.argv) > 4:
        startupType = sys.argv[4]
    printArguments(
        ["sddraftPath", "sdPath", "serverConnectionFile", "startupType"],
        [sddraftPath, sdPath, serverConnectionFile, startupType],
        "A07_B_StageSD")

    arcpy.AddMessage("Staging draft {} to service definition {}".format(
        sddraftPath, sdPath))
    arcpy.StageService_server(sddraftPath, sdPath)
    addToolMessages()

    arcpy.AddMessage("Uploading service definition {} to server {}".format(
        sdPath, serverConnectionFile))
    arcpy.UploadServiceDefinition_server(
        in_sd_file=sdPath,
        in_server=serverConnectionFile,
        in_startupType=startupType,
        in_my_contents="NO_SHARE_ONLINE",
        in_public="PRIVATE",
        in_organization="NO_SHARE_ORGANIZATION")
    addToolMessages()

    doTime(aaa, "Service publish completed {}".format(sdPath))
Example #20
0
def RevalueRaster(f_path, elev_type, raster_props, target_path, publish_path, minZ, maxZ, bound_path, spatial_ref=None):
    """
    Re-value one delivered raster into a derived copy and a published copy.

    Elevation rasters are clamped to the [minZ, maxZ] range (values outside
    become NoData); intensity rasters (elev_type == INT) are instead
    rescaled to 0-255 using a linear transform capped at mean + 2*stddev.
    The derived copy gets the spatial reference applied; the published copy
    is converted to TIFF and clipped to bound_path. Rasters that are not
    single-band, not float (for elevation), or not a supported format are
    skipped. Existing outputs are left untouched.

    f_path       -- source raster path
    elev_type    -- elevation type key (INT triggers intensity handling)
    raster_props -- dict of raster properties (BAND_COUNT, PIXEL_TYPE, FORMAT)
    target_path  -- derived-products root folder
    publish_path -- published-products root folder
    minZ, maxZ   -- valid elevation range (ignored/overridden for INT)
    bound_path   -- boundary feature class used to clip the published raster
    spatial_ref  -- optional spatial reference (or .prj path) to define
    """
    arcpy.AddMessage("RevalueRaster {} {}: ZRange({},{})".format(elev_type, f_path,minZ,maxZ))
    Utility.setArcpyEnv(is_overwrite_output=True)
    a = datetime.now()
    nodata = RasterConfig.NODATA_DEFAULT
    isInt = (elev_type == INT)
    if isInt:
        # Intensity output is always rescaled into the 8-bit range.
        minZ, maxZ = 0, 255
        arcpy.AddMessage("RevalueRaster type {} is intensity {}: ZRange({},{})".format(elev_type, f_path,minZ,maxZ))

    f_name, target_f_path, publish_f_path, stat_out_folder, stat_file_path, bound_out_folder, vector_bound_path = getFilePaths(f_path, elev_type, target_path, publish_path)  # @UnusedVariable

#     target_f_left, target_f_right = os.path.splitext(target_f_path)
#     target1_f_path = "{}1{}".format(target_f_left, target_f_right)

    # Temporary pre-clip copy of the published raster ("<name>1.<ext>").
    publish_f_left, publish_f_right = os.path.splitext(publish_f_path)
    publish1_f_path = "{}1{}".format(publish_f_left, publish_f_right)

    # Don't maintain fGDB raster format, update to TIFF
#     if raster_props[FORMAT] == "FGDBR":
#         target_f_path = "{}.TIF".format(target_f_path)


    if raster_props[BAND_COUNT] <> 1:
        arcpy.AddMessage("Skipping Raster {}, not 1 band image.".format(f_path))
    else:
        # Intensity may be another type
        if not isInt and not (raster_props[PIXEL_TYPE] == PIXEL_TYPE_F32 or raster_props[PIXEL_TYPE] == PIXEL_TYPE_D64):
            arcpy.AddMessage("Skipping Raster '{}', '{}' not Float32 type image.".format(f_path, raster_props[PIXEL_TYPE]))
        else:
            if not (raster_props[FORMAT] == "TIFF" or raster_props[FORMAT] == "GRID" or raster_props[FORMAT] == "IMAGINE Image" or raster_props[FORMAT] == "FGDBR"):
                arcpy.AddMessage("Skipping Raster '{}', '{}' not supported image format.".format(f_path, raster_props[FORMAT]))
            else:

                if arcpy.Exists(target_f_path):
                    # Derived output already exists; skip re-creating it.
                    arcpy.AddMessage("\tDerived Raster exists: {}".format(target_f_path))
                else:
                    deleteFileIfExists(target_f_path, True)
                    arcpy.AddMessage("\tSaving derived raster to {}".format(target_f_path))

                    # Compression isn't being applied properly so results are uncompressed
                    rasterObject = arcpy.Raster(f_path)
                    if isInt:
                        # Rescale intensity linearly; cap the input range at
                        # mean + 2 standard deviations to suppress outliers.
                        mean = rasterObject.mean
                        stdDev = rasterObject.standardDeviation
                        maximumPixel = mean + (stdDev * 2)
                        linearTransform = arcpy.sa.TfLinear(maximum=maximumPixel, upperThreshold=maximumPixel)
                        outRescale = arcpy.sa.RescaleByFunction(rasterObject, linearTransform, minZ, maxZ)
                        outRescale.save(target_f_path)
                        del outRescale, rasterObject
                    else:
                        # Keep only cells within [minZ, maxZ]; everything
                        # else becomes NoData.
                        outSetNull = arcpy.sa.Con(((rasterObject >= (float(minZ))) & (rasterObject <= (float(maxZ)))), f_path)  # @UndefinedVariable
                        outSetNull.save(target_f_path)
                        del outSetNull, rasterObject

                    if spatial_ref is not None:
                        arcpy.AddMessage("Applying projection to raster '{}' {}".format(target_f_path, spatial_ref))
                        if str(spatial_ref).lower().endswith(".prj"):
                            # A .prj file path was passed instead of a
                            # SpatialReference object; load it first.
                            arcpy.AddMessage("loading spatial reference from prj file '{}'".format(spatial_ref))
                            spatial_ref = arcpy.SpatialReference(spatial_ref)
                            arcpy.AddMessage("loaded spatial reference from prj file '{}'".format(spatial_ref))
                        # 3/22/18 - Handle UTF-8 Encoding - 'u\u2013' From MI Delta
                        try:
                            arcpy.AddMessage("Applying projection '{}'".format( spatial_ref))
                            arcpy.AddMessage("Applying string projection '{}'".format( spatial_ref.exportToString()))
                            arcpy.AddMessage("Applying encoded projection '{}'".format( spatial_ref.exportToString().encode('utf-8')))
                        except Exception as e:
                            arcpy.AddMessage('Error: {}'.format(e))

                        arcpy.DefineProjection_management(in_dataset=target_f_path, coor_system=spatial_ref)

                    # Set the no data default value on the input raster
                    arcpy.SetRasterProperties_management(in_raster=target_f_path, data_type="ELEVATION", nodata="1 {}".format(nodata))
                    arcpy.CalculateStatistics_management(in_raster_dataset=target_f_path, x_skip_factor="1", y_skip_factor="1", ignore_values="", skip_existing="OVERWRITE", area_of_interest="Feature Set")
#                     arcpy.BuildPyramidsandStatistics_management(in_workspace=target_f_path,
#                                                                 build_pyramids="BUILD_PYRAMIDS",
#                                                                 calculate_statistics="CALCULATE_STATISTICS",
#                                                                 BUILD_ON_SOURCE="BUILD_ON_SOURCE",
#                                                                 pyramid_level="-1",
#                                                                 SKIP_FIRST="NONE",
#                                                                 resample_technique="BILINEAR",
#                                                                 compression_type="LZ77",
#                                                                 compression_quality="75",
#                                                                 skip_existing="SKIP_EXISTING")


                    # make sure we make a new published copy of this
                    if arcpy.Exists(publish_f_path):
                        arcpy.Delete_management(publish_f_path)

                    a = doTime(a, "\tCopied '{}' to '{}' with valid values between {} and {}".format(f_path, target_f_path, minZ, maxZ))


                if arcpy.Exists(publish_f_path):
                    # Published output already exists; skip re-creating it.
                    arcpy.AddMessage("\tPublish Raster exists: {}".format(publish_f_path))
                else:
                    arcpy.AddMessage("\tCopy and clip published raster from {} to {}".format(target_f_path, publish1_f_path))
                    a = datetime.now()

                    deleteFileIfExists(publish1_f_path, True)
                    deleteFileIfExists(publish_f_path, True)
                    # arcpy.RasterToOtherFormat_conversion(target_f_path, publish_f_path, Raster_Format="TIFF")
                    # Copy to an intermediate TIFF, then clip that to the
                    # project boundary to produce the published raster.
                    arcpy.CopyRaster_management(in_raster=target_f_path, out_rasterdataset=publish1_f_path, config_keyword="", background_value="", nodata_value=nodata, onebit_to_eightbit="NONE", colormap_to_RGB="NONE", pixel_type="32_BIT_FLOAT", scale_pixel_value="NONE", RGB_to_Colormap="NONE", format="TIFF", transform="NONE")

                    arcpy.AddMessage("\tCliping temp raster {} to {}".format(publish1_f_path, publish_f_path))
                    arcpy.Clip_management(in_raster=publish1_f_path, out_raster=publish_f_path, in_template_dataset=bound_path, nodata_value=nodata, clipping_geometry="ClippingGeometry", maintain_clipping_extent="NO_MAINTAIN_EXTENT")

                    deleteFileIfExists(publish1_f_path, True)

                    arcpy.SetRasterProperties_management(in_raster=publish_f_path, data_type="ELEVATION", nodata="1 {}".format(nodata))
                    arcpy.CalculateStatistics_management(in_raster_dataset=publish_f_path, x_skip_factor="1", y_skip_factor="1", ignore_values="", skip_existing="OVERWRITE", area_of_interest="Feature Set")
#                     arcpy.BuildPyramidsandStatistics_management(in_workspace=publish_f_path,
#                                                                 build_pyramids="BUILD_PYRAMIDS",
#                                                                 calculate_statistics="CALCULATE_STATISTICS",
#                                                                 BUILD_ON_SOURCE="BUILD_ON_SOURCE",
#                                                                 pyramid_level="-1",
#                                                                 SKIP_FIRST="NONE",
#                                                                 resample_technique="BILINEAR",
#                                                                 compression_type="LZ77",
#                                                                 compression_quality="75",
#                                                                 skip_existing="SKIP_EXISTING")


                    a = doTime(a, "\tCopied '{}' to '{}'".format(target_f_path, publish_f_path))
def processRastersInFolder(fileList,
                           target_path,
                           publish_path,
                           elev_type,
                           bound_path,
                           z_min,
                           z_max,
                           v_name,
                           v_unit,
                           h_name,
                           h_unit,
                           h_wkid,
                           spatial_ref,
                           runAgain=True):
    """
    Process a list of raster files in parallel by spawning the
    A05_B_RevalueRaster.py tool as x64 subprocesses, submitting the files
    in comma-joined groups and throttling to the available processors.

    Parameters:
        fileList: list of raster file paths to process (may be None/empty).
        target_path, publish_path: working and publish folder roots passed
            through to the tool.
        elev_type: elevation type name used in messages and folder layout.
        bound_path: boundary feature class used to clip the rasters.
        z_min, z_max: vertical value range passed through to the tool.
        v_name, v_unit, h_name, h_unit, h_wkid: spatial reference parts.
        spatial_ref: spatial reference string/path passed to the tool.
        runAgain: when True, any failed groups trigger one recursive retry
            pass (with runAgain=False and smaller groups).

    Returns nothing; progress and errors are reported via arcpy messages.
    """
    a = datetime.now()
    path = os.path.join(RunUtil.TOOLS_PATH, "A05_B_RevalueRaster.py")
    # NOTE(review): the "createLasStatistics" label looks copied from another
    # tool; kept as-is to preserve existing log output.
    Utility.printArguments([
        "fileList", "target_path", "publish_path", "elev_type", "bound_path",
        "spatial_ref", "runAgain"
    ], [(None if fileList is None else len(fileList)), target_path,
        publish_path, elev_type, bound_path, spatial_ref, runAgain],
                           "createLasStatistics")

    # Retry passes use smaller groups so failures re-run in finer batches.
    grouping = PROCESS_CHUNKS
    if not runAgain:
        grouping = int(PROCESS_CHUNKS / 2)
    if grouping <= 1:
        grouping = 2
    total = len(fileList)
    if total > 0:

        fileList_repeat = []

        # Leave PROCESS_SPARES processors free on larger machines.
        procCount = int(os.environ['NUMBER_OF_PROCESSORS'])
        if procCount > 4:
            procCount = procCount - PROCESS_SPARES
        if procCount <= 0:
            procCount = 1
        arcpy.AddMessage(
            "processRastersInFolder: Using {}/{} Processors to process {} files in groups of {}"
            .format(procCount, (procCount + PROCESS_SPARES), total, grouping))
        # Each entry is ([process, log], group_file_string) so a failure can
        # be re-queued with the correct file group.
        processList = []

        indx = 0
        for f_paths in grouper(fileList, grouping):
            f_paths = [x for x in f_paths if x is not None]
            f_path = ",".join(f_paths)
            indx = indx + len(f_paths)

            arcpy.AddMessage(
                '       processRastersInFolder: Working on {} {}/{}'.format(
                    elev_type, indx, total))
            args = [
                f_path, elev_type, target_path, publish_path, bound_path,
                str(z_min),
                str(z_max), v_name, v_unit, h_name, h_unit,
                str(h_wkid), spatial_ref
            ]

            try:
                # BUG FIX: remember which file group belongs to which process;
                # previously a failure re-queued whatever f_path was current
                # at poll time, not the group that actually failed.
                processList.append(
                    (RunUtil.runToolx64_async(path, args, "A05_B",
                                              target_path), f_path))
                # give time for things to wake up
                time.sleep(PROCESS_DELAY)
            except:
                # BUG FIX: sys.exc_type / sys.exc_value are long-deprecated
                # and removed in Python 3; read them from sys.exc_info().
                exc_type, exc_value, tb = sys.exc_info()
                tbinfo = traceback.format_tb(tb)[0]
                pymsg = "processRastersInFolder: PYTHON ERRORS:\nTraceback Info:\n" + tbinfo + "\nError Info:\n    " + \
                        str(exc_type) + ": " + str(exc_value) + "\n"
                arcpy.AddWarning(pymsg)
                msgs = "processRastersInFolder: GP ERRORS:\n" + arcpy.GetMessages(
                    2) + "\n"
                arcpy.AddWarning(msgs)
                sys.exit(1)

            # Throttle: block here until a processor slot frees up.
            waitForResults = True
            first = True
            while waitForResults:
                if not first:
                    time.sleep(1)
                first = False
                # BUG FIX: rebuild the list instead of `del processList[i]`
                # inside enumerate() — deleting while iterating skipped the
                # entry that moved into the freed slot.
                still_running = []
                for proc_entry, group_paths in processList:
                    p, l = proc_entry
                    if p.poll() is None:
                        still_running.append((proc_entry, group_paths))
                    else:
                        # error log messages are handled in endRun_async
                        retCode = RunUtil.endRun_async(path, p, l)
                        if retCode != 0:
                            fileList_repeat.append(group_paths)
                processList = still_running

                waitForResults = (len(processList) >= int(procCount))

        # Wait for last subprocesses to complete
        arcpy.AddMessage(
            "       processRastersInFolder: Waiting for process list to clear {} jobs"
            .format(len(processList)))
        while len(processList) > 0:
            still_running = []
            for proc_entry, group_paths in processList:
                p, l = proc_entry
                if p.poll() is not None:
                    retCode = RunUtil.endRun_async(path, p, l)
                    # BUG FIX: failures during this final drain were
                    # previously dropped; queue them for the retry pass too.
                    if retCode != 0:
                        fileList_repeat.append(group_paths)
                else:
                    still_running.append((proc_entry, group_paths))
                    time.sleep(PROCESS_DELAY)
            if len(still_running) < len(processList):
                arcpy.AddMessage(
                    "       processRastersInFolder: Waiting for process list to clear {} jobs"
                    .format(len(still_running)))
            processList = still_running

        if runAgain and len(fileList_repeat) > 0:
            # try to clean up any errors along the way; the full list is
            # re-submitted because the tool skips already-completed artifacts.
            processRastersInFolder(fileList,
                                   target_path,
                                   publish_path,
                                   elev_type,
                                   bound_path,
                                   z_min,
                                   z_max,
                                   v_name,
                                   v_unit,
                                   h_name,
                                   h_unit,
                                   h_wkid,
                                   spatial_ref,
                                   runAgain=False)

    # Best-effort cleanup of the scratch folder; ignore errors.
    try:
        shutil.rmtree(os.path.join(target_path, elev_type, "TEMP"), True)
    except:
        pass

    doTime(a, 'processRastersInFolder: All jobs completed.')
Exemple #22
0
def checkSpatialOnLas(start_dir, target_path, createQARasters, isClassified):
    """
    Determine which spatial reference to use for a LAS delivery.

    Builds a temporary LAS dataset over the first .las file found under
    start_dir to read the spatial reference recorded in the LAS header, and
    also looks for a .prj file in start_dir. Both are decoded into
    horizontal/vertical name, unit and WKID components, compared, and the
    result of the comparison is reported via arcpy messages.

    Returns:
        The .prj file path when the PRJ holds a fully valid spatial
        reference, else the LAS spatial reference object when that is fully
        valid, else None.
    """
    las_spatial_ref = None
    prj_spatial_ref = None

    las_f_path = getLasFileProcessList(start_dir,
                                       target_path,
                                       createQARasters,
                                       isClassified,
                                       returnFirst=True)
    # The LAS dataset is created next to the .las file with a 'd' suffix.
    lasd_f_path = "{}d".format(las_f_path)

    a = datetime.now()
    deleteFileIfExists(lasd_f_path, True)
    arcpy.AddMessage(
        "{} Testing spatial reference on .las file: '{}' '{}'".format(
            datetime.now(), las_f_path, lasd_f_path))

    arcpy.CreateLasDataset_management(input=las_f_path,
                                      spatial_reference=None,
                                      out_las_dataset=lasd_f_path,
                                      folder_recursion="NO_RECURSION",
                                      in_surface_constraints="",
                                      compute_stats="COMPUTE_STATS",
                                      relative_paths="RELATIVE_PATHS",
                                      create_las_prj="NO_FILES")

    doTime(a, "\t{} Created LASD {}".format(datetime.now(), lasd_f_path))

    desc = arcpy.Describe(lasd_f_path)
    if desc is not None:
        las_spatial_ref = desc.SpatialReference
        if las_spatial_ref is not None:
            try:
                arcpy.AddMessage("\tFound spatial reference in LAS: {}".format(
                    las_spatial_ref.exportToString()))
            except:
                pass

    prj_Count, prj_File = Utility.fileCounter(start_dir, '.prj')
    arcpy.AddMessage("\tFound {} PRJ files, the first is: {}".format(
        prj_Count, prj_File))
    if prj_Count > 0 and prj_File is not None and len(str(prj_File)) > 0:
        prj_Path = os.path.join(start_dir, prj_File)
        arcpy.AddMessage(
            "\tReading spatial reference from PRJ file: {}".format(prj_Path))

        prj_spatial_ref = arcpy.SpatialReference(prj_Path)
        # BUG FIX: '.name' was previously read before the None check; keep
        # every access to prj_spatial_ref inside the guard.
        if prj_spatial_ref is not None:
            arcpy.AddMessage(
                "\tGot from PRJ file spatial reference: {}".format(
                    prj_spatial_ref.name))
            try:
                arcpy.AddMessage("\tFound spatial reference in PRJ: {}".format(
                    prj_spatial_ref.exportToString()))
            except:
                pass

    arcpy.AddMessage("Decoding LAS File Spatial Reference")
    las_horz_cs_name, las_horz_cs_unit_name, las_horz_cs_factory_code, las_vert_cs_name, las_vert_cs_unit_name = Utility.getSRValues(
        las_spatial_ref)

    prj_horz_cs_name, prj_horz_cs_unit_name, prj_horz_cs_factory_code, prj_vert_cs_name, prj_vert_cs_unit_name = None, None, None, None, None
    if prj_spatial_ref is not None:
        arcpy.AddMessage("Decoding PRJ File Spatial Reference")
        prj_horz_cs_name, prj_horz_cs_unit_name, prj_horz_cs_factory_code, prj_vert_cs_name, prj_vert_cs_unit_name = Utility.getSRValues(
            prj_spatial_ref)

    arcpy.AddMessage(
        "LAS File Spatial Reference:\n\tH_Name: '{}'\n\tH_Unit: '{}'\n\tH_WKID: '{}'\n\tV_Name: '{}'\n\tV_Unit: '{}'"
        .format(las_horz_cs_name, las_horz_cs_unit_name,
                las_horz_cs_factory_code, las_vert_cs_name,
                las_vert_cs_unit_name))
    arcpy.AddMessage(
        "PRJ File Spatial Reference:\n\tH_Name: '{}'\n\tH_Unit: '{}'\n\tH_WKID: '{}'\n\tV_Name: '{}'\n\tV_Unit: '{}'"
        .format(prj_horz_cs_name, prj_horz_cs_unit_name,
                prj_horz_cs_factory_code, prj_vert_cs_name,
                prj_vert_cs_unit_name))

    # A source is 'valid' only when horizontal and vertical name AND unit
    # are all present.
    prj_horz_name_isValid = isSrValueValid(prj_horz_cs_name)
    prj_vert_name_isValid = isSrValueValid(prj_vert_cs_name)
    prj_horz_unit_isValid = isSrValueValid(prj_horz_cs_unit_name)
    prj_vert_unit_isValid = isSrValueValid(prj_vert_cs_unit_name)

    las_horz_name_isValid = isSrValueValid(las_horz_cs_name)
    las_vert_name_isValid = isSrValueValid(las_vert_cs_name)
    las_horz_unit_isValid = isSrValueValid(las_horz_cs_unit_name)
    las_vert_unit_isValid = isSrValueValid(las_vert_cs_unit_name)

    prj_isValid = prj_horz_name_isValid and prj_vert_name_isValid and prj_horz_unit_isValid and prj_vert_unit_isValid

    las_isValid = las_horz_name_isValid and las_vert_name_isValid and las_horz_unit_isValid and las_vert_unit_isValid

    # Components 'match' only when both sides are valid AND equal.
    sr_horz_name_isSame = prj_horz_name_isValid and las_horz_name_isValid and prj_horz_cs_name == las_horz_cs_name
    sr_horz_unit_isSame = prj_horz_unit_isValid and las_horz_unit_isValid and prj_horz_cs_unit_name == las_horz_cs_unit_name
    sr_vert_name_isSame = prj_vert_name_isValid and las_vert_name_isValid and prj_vert_cs_name == las_vert_cs_name
    sr_vert_unit_isSame = prj_vert_unit_isValid and las_vert_unit_isValid and prj_vert_cs_unit_name == las_vert_cs_unit_name

    sr_horz_isSame = sr_horz_name_isSame and sr_horz_unit_isSame
    sr_vert_isSame = sr_vert_name_isSame and sr_vert_unit_isSame

    sr_isSame = sr_horz_isSame and sr_vert_isSame

    if prj_isValid or las_isValid:
        # BUG FIX: these three messages had no '{}' placeholder, so the
        # spatial reference info passed to .format() was silently discarded.
        if sr_horz_isSame:
            arcpy.AddMessage(
                "         The LAS and PRJ horizontal spatial references MATCH: {}".
                format(Utility.getSpatialReferenceInfo(prj_spatial_ref)))
        else:
            arcpy.AddWarning(
                "WARNING: The LAS and PRJ horizontal spatial references DO NOT MATCH."
            )

        if sr_vert_isSame:
            arcpy.AddMessage(
                "         The LAS and PRJ vertical spatial references MATCH: {}".
                format(Utility.getSpatialReferenceInfo(prj_spatial_ref)))
        else:
            arcpy.AddWarning(
                "WARNING: The LAS and PRJ vertical spatial references DO NOT MATCH."
            )

        if sr_isSame:
            arcpy.AddMessage(
                "         The LAS and PRJ spatial references MATCH: {}".format(
                    Utility.getSpatialReferenceInfo(prj_spatial_ref)))
        else:
            arcpy.AddWarning(
                "WARNING: The LAS and PRJ spatial references DO NOT MATCH.")

    # Prefer the PRJ file over the LAS header when both are valid.
    result = None
    if prj_isValid:
        arcpy.AddMessage(
            "         Found a valid spatial reference in a PRJ file. Using this as the spatial reference: {}"
            .format(Utility.getSpatialReferenceInfo(prj_spatial_ref)))
        result = os.path.join(start_dir, prj_File)
    elif las_isValid:
        arcpy.AddMessage(
            "         Found a valid spatial reference in a LAS file. Using this as the spatial reference: {}"
            .format(Utility.getSpatialReferenceInfo(las_spatial_ref)))
        result = las_spatial_ref

    return result
Exemple #23
0
def createRasterBoundaryAndFootprints(fgdb_path, target_path, project_ID, project_path, project_UID):
    """
    Create the LAS footprint and boundary feature classes for a project.

    Collects the per-tile 'B_*.shp' and matching 'C_*.shp' statistics
    shapefiles from STAT_LAS_FOLDER under target_path, merges them, derives
    boundary feature classes from each set, computes the symmetrical
    difference between the B and C boundaries (reported as *_SD), and clips
    the merged footprints to the B boundary. Existing outputs are reused.

    Parameters:
        fgdb_path: file geodatabase receiving the outputs.
        target_path: derived-products root containing STAT_LAS_FOLDER.
        project_ID, project_path, project_UID: project metadata attached to
            the outputs via addProjectInfo.

    Returns:
        (lasd_boundary, las_footprint) output paths.
    """
    a = datetime.datetime.now()

    stat_out_folder = os.path.join(target_path, STAT_LAS_FOLDER)

    b_file_list = []
    c_file_list = []

    # Pair every B_* shapefile with its C_* sibling (same name, 'C' prefix).
    for f_name in [f for f in os.listdir(stat_out_folder) if (f.startswith('B_') and f.endswith('.shp'))]:
        b_path = os.path.join(stat_out_folder, f_name)
        c_path = os.path.join(stat_out_folder, "C{}".format(f_name[1:]))

        try:
            if not os.path.exists(b_path):
                arcpy.AddWarning("Failed to find B boundary file {}".format(b_path))
            else:
                b_file_list.append(b_path)
        except:
            pass
        try:
            if not os.path.exists(c_path):
                arcpy.AddWarning("Failed to find C boundary file {}".format(c_path))
            else:
                c_file_list.append(c_path)
                deleteFields(c_path)
        except:
            pass

    a = doTime(a, "Found {} footprints".format(len(b_file_list)))

    # Best-effort sanity check: report any footprint file that is not a
    # polygon shapefile (it would corrupt the merge below).
    try:
        for file_type in [r'C_*.shp', r'B_*.shp']:
            bad_shape_type_list = list(filter(lambda x: arcpy.Describe(x).shapeType != 'Polygon',glob.glob(os.path.join(stat_out_folder, file_type))))
            if len(bad_shape_type_list) > 0:
                for bad_type in bad_shape_type_list:
                    arcpy.AddMessage("ERROR: Bad shape type in file '{}'".format(bad_type))
    except:
        pass

    las_footprint = getLasFootprintPath(fgdb_path)
    lasd_boundary = getLasdBoundaryPath(fgdb_path)
    if arcpy.Exists(las_footprint):
        arcpy.AddMessage("Footprints exist: {}".format(las_footprint))
    else:
        # Delete the boundary if the footprints don't exist (have to recreate anyway)
        deleteFileIfExists(lasd_boundary, True)
        lasd_boundary_B = "{}B".format(lasd_boundary)
        deleteFileIfExists(lasd_boundary_B, True)

        # NOTE(review): las_footprint looks like it may already be a full
        # workspace path; os.path.join assumes it is relative — confirm.
        las_footprint_1 = os.path.join(fgdb_path, "{}B1".format(las_footprint))
        las_footprint_2 = os.path.join(fgdb_path, "{}B2".format(las_footprint))
        las_footprint_CP = os.path.join(fgdb_path, "{}B_CP".format(las_footprint))
        a = doTime(a, "\tMerging B footprints to {}".format(las_footprint_2))
        deleteFileIfExists(las_footprint_1, True)
        deleteFileIfExists(las_footprint_2, True)
        deleteFileIfExists(las_footprint_CP, True)
##Old Code        arcpy.Merge_management(inputs=b_file_list, output=las_footprint_2)
##New Code
        #BJN: Reduce memory usage by appending blocks of tiles instead of merging
        #     all of the tiles at once.
        out_path, out_name = os.path.split(las_footprint_2)
        cfs_template = b_file_list[0]
        arcpy.CreateFeatureclass_management(out_path, out_name, "POLYGON", template = cfs_template, spatial_reference = cfs_template)

        append_group_size = 1024 #appending x tiles at a time
        append_group_start = 0  #list offset for the current file group

        while append_group_start < len(b_file_list):
            #set the new file group endpoint
            append_group_end = append_group_start + append_group_size
            #Use the next append_group_size tiles, or up to the end of the list,
            #whichever is smaller.
            append_msg = '{}Adding {} tiles'.format(' '*8, append_group_size)
            if append_group_end < len(b_file_list):
                arcpy.Append_management(b_file_list[append_group_start:append_group_end], las_footprint_2)
            else:
                append_msg = '{}Adding {} tiles'.format(' '*8, len(b_file_list[append_group_start:]))
                arcpy.Append_management(b_file_list[append_group_start:], las_footprint_2)
            arcpy.AddMessage(append_msg)

            #Set the new start point
            append_group_start = append_group_end

        arcpy.Compact_management(out_path)
##End New Code

        #Utility.addToolMessages()
        deleteFields(las_footprint_2)
        arcpy.RepairGeometry_management(in_features=las_footprint_2, delete_null="DELETE_NULL")
        #Utility.addToolMessages()
        a = doTime(a, "\tMerged B and repaired footprints to {}".format(las_footprint_2))

        #arcpy.CreateCartographicPartitions_cartography(in_features=las_footprint_2, out_features=las_footprint_CP, feature_count=PARTITION_COUNT)
        #a = doTime(a, "\tCreated B carto parts")

        #arcpy.env.cartographicPartitions = las_footprint_CP
        #a = doTime(a, "\tSet B cartographic partitions to {}".format(las_footprint_CP))

        #arcpy.SimplifyPolygon_cartography(in_features=las_footprint_2, out_feature_class=las_footprint_1, algorithm="POINT_REMOVE", tolerance="0.5 Meters", minimum_area="0 Unknown", error_option="RESOLVE_ERRORS", collapsed_point_option="NO_KEEP", in_barriers="")
        #Utility.addToolMessages()
        #arcpy.env.cartographicPartitions = None
        #a = doTime(a, "\tSimplified B las footprints to {}".format(las_footprint_1))
        #deleteFileIfExists(las_footprint_2, True)
        #arcpy.RepairGeometry_management(in_features=las_footprint_1, delete_null="DELETE_NULL")
        #Utility.addToolMessages()

        #a = doTime(a, "Merged B las footprints {}".format(las_footprint_1))

        # Dissolve the merged B footprints into a single boundary.
        createBoundaryFeatureClass(las_footprint_2, lasd_boundary_B)
        deleteFileIfExists(las_footprint_2, True)
        a = datetime.datetime.now()


        # Merge the other footprints before clipping
        las_footprint_1 = os.path.join(fgdb_path, "{}C1".format(las_footprint))
        las_footprint_2 = os.path.join(fgdb_path, "{}C2".format(las_footprint))
        las_footprint_CP = os.path.join(fgdb_path, "{}C_CP".format(las_footprint))
        deleteFileIfExists(las_footprint_1, True)
        deleteFileIfExists(las_footprint_2, True)
        deleteFileIfExists(las_footprint_CP, True)
        a = doTime(a, "\tMerging C las footprints to {}".format(las_footprint_2))
        arcpy.Merge_management(inputs=c_file_list, output=las_footprint_2)
        #Utility.addToolMessages()
        deleteFields(las_footprint_2)
        arcpy.RepairGeometry_management(in_features=las_footprint_2, delete_null="DELETE_NULL")
        #Utility.addToolMessages()
        a = doTime(a, "\tMerged C las footprints to {}".format(las_footprint_2))

        #arcpy.CreateCartographicPartitions_cartography(in_features=las_footprint_2, out_features=las_footprint_CP, feature_count=PARTITION_COUNT)
        #a = doTime(a, "\tCreated C carto parts")

        #arcpy.env.cartographicPartitions = las_footprint_CP
        #a = doTime(a, "\tSet C cartographic partitions to {}".format(las_footprint_CP))

        #arcpy.SimplifyPolygon_cartography(in_features=las_footprint_2, out_feature_class=las_footprint_1, algorithm="POINT_REMOVE", tolerance="0.5 Meters", minimum_area="0 Unknown", error_option="RESOLVE_ERRORS", collapsed_point_option="NO_KEEP", in_barriers="")
        #Utility.addToolMessages()
        #arcpy.env.cartographicPartitions = None
        #a = doTime(a, "\tSimplified C las footprints to {}".format(las_footprint_1))
        #deleteFileIfExists(las_footprint_2, True)
        #arcpy.RepairGeometry_management(in_features=las_footprint_1, delete_null="DELETE_NULL")
        #Utility.addToolMessages()

        #a = doTime(a, "Merged C las footprints {}".format(las_footprint_1))

        lasd_boundary_C = "{}C".format(lasd_boundary)
        deleteFileIfExists(lasd_boundary_C, True)
        createBoundaryFeatureClass(las_footprint_2, lasd_boundary_C)

        lasd_boundary_SD = "{}_SD".format(lasd_boundary)
        lasd_boundary_SD1 = "{}_SD1".format(lasd_boundary)
        lasd_boundary_SD2 = "{}_SD2".format(lasd_boundary)
        lasd_boundary_SD3 = "{}_SD3".format(lasd_boundary)
        deleteFileIfExists(lasd_boundary_SD, True)
        deleteFileIfExists(lasd_boundary_SD1, True)
        deleteFileIfExists(lasd_boundary_SD2, True)
        deleteFileIfExists(lasd_boundary_SD3, True)
        # Replace a layer/table view name with a path to a dataset (which can be a layer file) or create the layer/table view within the script
        # The -3 m / +3 m buffer round-trip (with multipart split in between)
        # drops sliver polygons from the symmetric difference output.
        arcpy.SymDiff_analysis(in_features=lasd_boundary_B, update_features=lasd_boundary_C, out_feature_class=lasd_boundary_SD1, join_attributes="ONLY_FID", cluster_tolerance="3 Meters")
        arcpy.Buffer_analysis(in_features=lasd_boundary_SD1, out_feature_class=lasd_boundary_SD2, buffer_distance_or_field="-3 Meters", line_side="FULL", line_end_type="ROUND", dissolve_option="NONE", dissolve_field="", method="PLANAR")
        arcpy.MultipartToSinglepart_management(in_features=lasd_boundary_SD2, out_feature_class=lasd_boundary_SD3)
        arcpy.Buffer_analysis(in_features=lasd_boundary_SD3, out_feature_class=lasd_boundary_SD, buffer_distance_or_field="3 Meters", line_side="FULL", line_end_type="ROUND", dissolve_option="NONE", dissolve_field="", method="PLANAR")
        arcpy.DeleteField_management(in_table=lasd_boundary_SD, drop_field="FID_BoundaryLASDatasetB;FID_BoundaryLASDatasetC;BUFF_DIST;ORIG_FID")

        deleteFileIfExists(lasd_boundary_C, True)
        deleteFileIfExists(lasd_boundary_SD1, True)
        deleteFileIfExists(lasd_boundary_SD2, True)
        deleteFileIfExists(lasd_boundary_SD3, True)

        a = doTime(a, "Created symetrical difference in boundaries {}".format(lasd_boundary_SD))

        checkNullFields(las_footprint_2)
        a = datetime.datetime.now()

        # The final footprint is the merged C footprints clipped to the B boundary.
        deleteFileIfExists(las_footprint, True)
        arcpy.Clip_analysis(in_features=las_footprint_2, clip_features=lasd_boundary_B, out_feature_class=las_footprint, cluster_tolerance="")
        #deleteFileIfExists(las_footprint_1, True)
        deleteFileIfExists(lasd_boundary_B, True)

        #deleteFileIfExists(lasd_boundary, True)
        #deleteFileIfExists(las_footprint_CP, True)
        #arcpy.env.cartographicPartitions = None

        deleteFileIfExists(las_footprint_2, True)
        a = doTime(a, "Clipped las footprints to dataset boundary {} ".format(las_footprint))

    deleteFields(las_footprint)
    # Repair is best-effort; failures are ignored.
    try:
        arcpy.RepairGeometry_management(in_features=las_footprint, delete_null="KEEP_NULL")
    except:
        pass

    if arcpy.Exists(lasd_boundary):
        arcpy.AddMessage("Boundary exists: {}".format(lasd_boundary))
    else:
        deleteFileIfExists(lasd_boundary, True)

        # Dissolve the footprints into the project boundary, carrying
        # summary statistics and renamed fields.
        summary_string, field_alter = getStatsFields(las_footprint)
        createBoundaryFeatureClass(las_footprint, lasd_boundary, summary_string, field_alter)

        addProjectInfo(las_footprint, lasd_boundary, project_ID, project_path, project_UID)

    deleteFields(lasd_boundary)

    # Repair is best-effort; failures are ignored.
    try:
        arcpy.RepairGeometry_management(in_features=lasd_boundary, delete_null="KEEP_NULL")
    except:
        pass

    return lasd_boundary, las_footprint
Exemple #24
0
        "\tf_paths='{}',elev_type='{}',target_path='{}',publish_path='{}',bound_path='{}',z_min='{}', z_max='{}', v_name='{}', v_unit='{}', h_name='{}', h_unit='{}', h_wkid='{}', sr='{}'"
        .format(
            f_paths, elev_type, target_path, publish_path, bound_path, z_min, z_max, v_name, v_unit, h_name, h_unit, h_wkid, spatial_ref
            )
        )

    f_paths = str(f_paths).split(",")

    for f_path in f_paths:
        if not isProcessFile(f_path, elev_type, target_path, publish_path):
            arcpy.AddMessage("\tAll raster file artifacts exist. Ignoring: {}".format(f_path))
        else:
            if not checkedOut:
                checkedOut = True
                arcpy.AddMessage("\tChecking out licenses")
                arcpy.CheckOutExtension("3D")
                arcpy.CheckOutExtension("Spatial")

            processFile(bound_path, f_path, elev_type, target_path, publish_path, z_min, z_max, v_name, v_unit, h_name, h_unit, h_wkid, spatial_ref)

    if checkedOut:
        arcpy.CheckInExtension("3D")
        arcpy.CheckInExtension("Spatial")

    doTime(aaa, "Completed {}".format(f_path))





Exemple #25
0
def getStatsFields(feature_class=None):
    """
    Build the Summary Statistics field specification for LAS footprints.

    Assembles the list of [field_info, operation] pairs covering file-level
    attributes, per-return-type fields, and per-class fields (c00..c17,
    excluding class 7), then derives:
      - a ';'-joined "<field> <operation>" string for Summary Statistics,
      - a list of [stat_field_name, new_name, new_alias] renames to apply
        to the statistics output afterwards.

    Parameters:
        feature_class: optional feature class; when given, any base field
            missing from it is added via addFieldIfMissing.

    Returns:
        (summary_string, field_alter) as described above.
    """
    a = datetime.datetime.now()
    # [field_info, summary operation] pairs for the file-level attributes.
    base_fields = [
                   [FIELD_INFO[NAME], "COUNT"],
                   [FIELD_INFO[PATH], "FIRST"],
                   [FIELD_INFO[IS_CLASSIFIED], "FIRST"],
                   [FIELD_INFO[V_NAME], "FIRST"],
                   [FIELD_INFO[V_UNIT], "FIRST"],
                   [FIELD_INFO[H_NAME], "FIRST"],
                   [FIELD_INFO[H_UNIT], "FIRST"],
                   [FIELD_INFO[H_WKID], "FIRST"],
                  [FIELD_INFO[AREA], "SUM"],
                  [FIELD_INFO[MAX], "MAX"],
                  [FIELD_INFO[MEAN], "MEAN"],
                  [FIELD_INFO[MIN], "MIN"],
                  [FIELD_INFO[RANGE], "MAX"],
                  [FIELD_INFO[STAND_DEV], "MEAN"],
                  [FIELD_INFO[XMIN], "MIN"],
                  [FIELD_INFO[YMIN], "MIN"],
                  [FIELD_INFO[XMAX], "MAX"],
                  [FIELD_INFO[YMAX], "MAX"]
                  ]

    # Per-return-type field infos; per-class entries are appended below.
    class_fields = [
                    FIELD_INFO[FIRST_RETURNS],
                    FIELD_INFO[SECOND_RETURNS],
                    FIELD_INFO[THIRD_RETURNS],
                    FIELD_INFO[FOURTH_RETURNS],
                    FIELD_INFO[SINGLE_RETURNS],
                    FIELD_INFO[FIRST_OF_MANY_RETURNS],
                    FIELD_INFO[LAST_OF_MANY_RETURNS],
                    FIELD_INFO[ALL_RETURNS]
                    ]
    # Value fields summarized for return-type fields.
    value_fields = [
                  [FIELD_INFO[POINT_COUNT], "SUM"],
                  [FIELD_INFO[MAX], "MAX"],
                  [FIELD_INFO[MIN], "MIN"],
                  [FIELD_INFO[RANGE], "MAX"]
                  ]
    # Value fields summarized for per-class ('c*') fields, which also
    # carry point spacing.
    class_value_fields = [
                  [FIELD_INFO[POINT_COUNT], "SUM"],
                  [FIELD_INFO[POINT_SPACING], "MEAN"],
                  [FIELD_INFO[MAX], "MAX"],
                  [FIELD_INFO[MIN], "MIN"],
                  [FIELD_INFO[RANGE], "MAX"]
                  ]
    # Add per-class fields c00..c17, skipping class 7 (noise).
    # BUG FIX: replaced the Python-2-only '<>' operator with '!='. The
    # test against 18 is kept from the original but is unreachable since
    # range(0, 18) stops at 17.
    for clazz in range(0, 18):
        if clazz != 7 and clazz != 18:
            value = "c{}".format(("0{}".format(clazz))[-2:])
            class_fields.append([value, "Class {}".format(value), "", ""])
    # Cross every class/return field with its value fields, e.g. 'c02_pc'.
    for class_field_info in class_fields:
        class_field = class_field_info[0]
        field_values = value_fields
        if class_field.startswith("c") or class_field.startswith("ra"):
            field_values = class_value_fields
        for value_field_record in field_values:
            value_field_info = value_field_record[0]
            value_field_summary = value_field_record[1]
            base_fields.append([["{}_{}".format(class_field_info[0], value_field_info[0]), "{} {}".format(class_field_info[1], value_field_info[1]) , value_field_info[2], value_field_info[3]], value_field_summary])


    # Summary Statistics names output fields '<OP>_<field>'; build the rename
    # list mapping those back to the original field names and aliases.
    field_alter = []
    for base_field in base_fields:
        field_name = "{}_{}".format(base_field[1], base_field[0][0])
        new_field_name = base_field[0][0]
        new_field_alias = base_field[0][1]
        new_field = [field_name, new_field_name, new_field_alias]

        if new_field[0] == "COUNT_name":
            new_field = [field_name, field_name, "Number of LAS Files"]
        field_alter.append(new_field)
        # arcpy.AddMessage("Alter Field Name: '{}'".format(new_field))

    existing_fieldnames = None
    if feature_class is not None:
        existing_fieldnames = [field.name for field in arcpy.ListFields(feature_class)]

    summary_fields = []
    for base_field in base_fields:
        base_field_info = base_field[0]
        # Simplified from '(False if x in y else True)' to 'not in'.
        if existing_fieldnames is not None and base_field_info[0] not in existing_fieldnames:
            #arcpy.AddMessage("Adding field {} to {}".format(base_field_info, feature_class))
            addFieldIfMissing(feature_class, existing_fieldnames, base_field_info)

        base_field_op = base_field[1]
        summary_field = "{} {}".format(base_field_info[0], base_field_op)
        summary_fields.append(summary_field)
        # arcpy.AddMessage("Summary Field Name: '{}'".format(summary_field))

    summary_string = ";".join(summary_fields)

    #arcpy.AddMessage("Summary String = '{}'".format(summary_string))
    a = doTime(a, "Created Summary String")
    return summary_string, field_alter
def createRasterBoundaryAndFootprints(fgdb_path, target_path, project_ID,
                                      project_path, project_UID, elev_type):
    """
    Build the raster footprint and boundary feature classes for one
    elevation type.

    Merges the per-tile 'B_*.shp' statistics shapefiles found under
    STAT_RASTER_FOLDER/<elev_type>, derives a dissolved boundary with
    summary statistics, then buffers the footprints by 2 meters and clips
    them back to the boundary to avoid gaps in the output mosaic. Existing
    outputs are reused.

    Parameters:
        fgdb_path: file geodatabase receiving the outputs.
        target_path: derived-products root containing STAT_RASTER_FOLDER.
        project_ID, project_path, project_UID: project metadata attached to
            the outputs via addProjectInfo.
        elev_type: elevation type subfolder/name (e.g. DTM, DSM).

    Returns:
        (raster_footprint, raster_boundary) paths, or (None, None) when the
        statistics folder for elev_type does not exist.
    """
    a = datetime.datetime.now()
    raster_footprint = None
    raster_boundary = None

    stat_out_folder = os.path.join(target_path, STAT_RASTER_FOLDER, elev_type)
    if not os.path.exists(stat_out_folder):
        arcpy.AddMessage(
            "Raster statistics for elevation type don't exist: {}".format(
                stat_out_folder))
    else:

        b_file_list = []
        for f_name in [
                f for f in os.listdir(stat_out_folder)
                if (f.startswith('B_') and f.endswith('.shp'))
        ]:
            b_path = os.path.join(stat_out_folder, f_name)

            try:
                if not os.path.exists(b_path):
                    arcpy.AddWarning(
                        "Failed to find B boundary file {}".format(b_path))
                else:
                    b_file_list.append(b_path)
            except:
                pass

        a = doTime(a, "Found boundaries {}".format(len(b_file_list)))

        raster_footprint = getRasterFootprintPath(fgdb_path, elev_type)
        raster_boundary = getRasterBoundaryPath(fgdb_path, elev_type)
        if arcpy.Exists(raster_footprint):
            arcpy.AddMessage(
                "Raster Footprints exist: {}".format(raster_footprint))
        else:

            deleteFileIfExists(raster_footprint, True)
            arcpy.Merge_management(inputs=b_file_list, output=raster_footprint)
            arcpy.RepairGeometry_management(in_features=raster_footprint,
                                            delete_null="DELETE_NULL")
            deleteFields(raster_footprint)

            # Rename merged fields back to the names/aliases in FIELD_INFO.
            field_alter = []
            for base_field in FIELD_INFO:
                field_name = base_field[0]
                new_field_name = base_field[0]
                new_field_alias = base_field[1]
                new_field = [field_name, new_field_name, new_field_alias]

                field_alter.append(new_field)
            alterFields(field_alter, raster_footprint)

            a = doTime(a,
                       "Merged raster footprints {}".format(raster_footprint))

            arcpy.RepairGeometry_management(in_features=raster_footprint,
                                            delete_null="DELETE_NULL")
            deleteFields(raster_footprint)

        if arcpy.Exists(raster_boundary):
            arcpy.AddMessage(
                "Raster Boundary exist: {}".format(raster_boundary))
        else:
            summary_string, field_alter = getStatsFields(raster_footprint)
            createBoundaryFeatureClass(raster_footprint, raster_boundary,
                                       summary_string, field_alter)

            addProjectInfo(raster_footprint, raster_boundary, project_ID,
                           project_path, project_UID)

        # Buffer the footprints by 2 meters and clip back to the boundary to
        # avoid gaps in the output mosaic.
        # (Renamed from 'one_meter_buffer': the distance used is 2 meters.)
        buffered_footprint = arcpy.Buffer_analysis(
            raster_footprint,
            os.path.join(fgdb_path,
                         '{}_2m'.format(os.path.split(raster_footprint)[1])),
            '2 METER')
        arcpy.RepairGeometry_management(in_features=buffered_footprint,
                                        delete_null="DELETE_NULL")
        deleteFields(buffered_footprint)

        arcpy.Clip_analysis(buffered_footprint, raster_boundary,
                            raster_footprint)
        arcpy.RepairGeometry_management(in_features=raster_footprint,
                                        delete_null="DELETE_NULL")
        deleteFields(raster_footprint)

        arcpy.Delete_management(buffered_footprint)

    return raster_footprint, raster_boundary
Exemple #27
0
from ngce.Utility import doTime, addToolMessages, printArguments

if __name__ == '__main__':

    # Capture start time so total elapsed time can be reported at the end.
    start_time = datetime.now()

    # Positional command-line arguments; labels passed to printArguments
    # below document their meaning.
    sd_draft_path = sys.argv[1]
    sd_path = sys.argv[2]
    server_connection_file = sys.argv[3]
    startup_type = sys.argv[4]
    printArguments(
        ["sddraftPath", "sdPath", "serverConnectionFile", "startupType"],
        [sd_draft_path, sd_path, server_connection_file, startup_type],
        "A07_B_StageSD")

    # Stage the service definition draft (.sddraft) into a .sd file.
    arcpy.StageService_server(sd_draft_path, sd_path)
    addToolMessages()
    arcpy.AddMessage("Publishing mosaic data set as image service.")

    # Publish the staged service definition to the target server, keeping
    # the service private (no online/organization sharing).
    arcpy.UploadServiceDefinition_server(
        in_sd_file=sd_path,
        in_server=server_connection_file,
        in_startupType=startup_type,
        in_my_contents="NO_SHARE_ONLINE",
        in_public="PRIVATE",
        in_organization="NO_SHARE_ORGANIZATION")
    addToolMessages()

    doTime(start_time, "Completed {}".format(sd_path))
Exemple #28
0
def ImportContourCacheToMaster(jobID,
                               serverConnectionFilePath,
                               masterServiceName,
                               update=False,
                               runCount=0):
    """
    Import a project's 2 ft contour tile cache into the master map service.

    Resolves the project for the given WMX job, locates the project's cache
    folder and its DTM-derived contour boundary feature class, then calls
    arcpy.ImportMapServerCache_server to merge the project's cached tiles
    into the master service's cache over that area of interest.

    jobID -- WMX job id used to look up the project record
    serverConnectionFilePath -- ArcGIS server connection path used to build
        the master service path (a trailing '.ags' is stripped)
    masterServiceName -- base name of the master contour map service
    update -- logged only; not otherwise used in this function
    runCount -- logged only; not otherwise used in this function
    """
    a = datetime.datetime.now()  # per-step timer, advanced by each doTime()
    aa = a  # overall start time, reported at the very end
    cache_dir = ContourConfig.CACHE_FOLDER
    #@TODO: Remove this workaround once fix is validated on NGCE
    # Fall back to a hard-coded server connection when the supplied path is
    # missing, too short, or does not reference the expected host.
    if serverConnectionFilePath is None or len(
            str(serverConnectionFilePath)) <= 1 or str(
                serverConnectionFilePath).lower().find(
                    "aiotxftw3gi013".lower()) < 0:
        serverConnectionFilePath = "//aiotxftw6na01data/SMB03/elevation/WorkflowManager/arcgis on aiotxftw3gi013.usda.net"
    Utility.printArguments([
        "WMX Job ID", "serverConnectionFilePath", "cache_dir",
        "masterServiceName", "update", "runCount"
    ], [
        jobID, serverConnectionFilePath, cache_dir, masterServiceName, update,
        runCount
    ], "C04 ImportContourCacheToMaster")

    # Point the environment workspace at the WMX job's data workspace so
    # the CMDR lookups below resolve against the right database.
    Utility.setWMXJobDataAsEnvironmentWorkspace(jobID)

    ProjectJob = CMDR.ProjectJob()
    project, ProjectUID = ProjectJob.getProject(jobID)  # @UnusedVariable

    if project is not None:
        projectID = ProjectJob.getProjectID(project)

        ProjectFolder = ProjectFolders.getProjectFolderFromDBRow(
            ProjectJob, project)
        #         con_folder = ProjectFolder.derived.contour_path
        #         contour_file_gdb_path = os.path.join(con_folder, CONTOUR_GDB_NAME)
        #         PublishFolder = ProjectFolder.published.path
        derived_filegdb_path = ProjectFolder.derived.fgdb_path
        #         contourMerged_Name = (ContourConfig.MERGED_FGDB_NAME).format(projectID)
        #         contourMerged_Name = in_cont_fc = os.path.join(contour_file_gdb_path, CONTOUR_NAME_WM)
        #         contour_pub_file_gdb_path = os.path.join(PublishFolder, contourMerged_Name)
        #         contourMxd_Name = ContourConfig.CONTOUR_MXD_NAME
        #         contourMxd_path = os.path.join(PublishFolder, contourMxd_Name)
        #         ContourFC = os.path.join(contour_pub_file_gdb_path, ContourConfig.CONTOUR_FC_WEBMERC)
        #         ContourBoundFC = os.path.join(contour_pub_file_gdb_path, ContourConfig.CONTOUR_BOUND_FC_WEBMERC)ContourBoundFC = A05_C_ConsolidateRasterInfo.getRasterBoundaryPath(derived_filegdb_path, DTM)
        # Boundary of the project's DTM rasters; used as the import's
        # area of interest below.
        ContourBoundFC = A05_C_ConsolidateRasterInfo.getRasterBoundaryPath(
            derived_filegdb_path, DTM)

        projectServiceName = "{}_{}".format(
            projectID, ContourConfig.CONTOUR_2FT_SERVICE_NAME
        )  # arcpy.GetParameterAsText(3)
        projectFolder = ProjectJob.getState(
            project)  # arcpy.GetParameterAsText(4)

        # Get input parameters
        # Default cache location; overridden below when the project has a
        # state/folder prefix.
        projectCache = os.path.join(ContourConfig.CACHE_FOLDER,
                                    projectServiceName, "Layers")
        if projectFolder is not None and len(projectFolder) > 0:
            projectCache = os.path.join(
                ContourConfig.CACHE_FOLDER,
                "{}_{}".format(projectFolder, projectServiceName),
                "Layers")  # arcpy.GetParameterAsText(0)          #YES
        areaOfInterest = ContourBoundFC  # arcpy.GetParameterAsText(1)   #YES
        #         serverConnectionFilePath = serverConnectionFilePath  # arcpy.GetParameterAsText(2)

        # Full path of the master .MapServer service inside the server
        # connection; a folder-style path is used when the connection ends
        # in '.ags' (extension stripped).
        masterService = os.path.join(
            serverConnectionFilePath, "{}_{}.MapServer".format(
                masterServiceName,
                ContourConfig.CONTOUR_2FT_SERVICE_NAME))  # YES
        if serverConnectionFilePath.endswith(".ags"):
            masterService = os.path.join(
                serverConnectionFilePath[:-4], "{}_{}.MapServer".format(
                    masterServiceName, ContourConfig.CONTOUR_2FT_SERVICE_NAME))
        arcpy.AddMessage(
            "Location of master service is: {0}".format(masterService))
        #         scales = ContourConfig.CONTOUR_SCALES_STRING

        #-------------------------------------------------------------------------------
        #-------------------------------------------------------------------------------
        # The following paths and values can be modified if needed

        # Other map service properties
        #         cachingInstances =   ContourConfig.CACHE_INSTANCES# This should be increased based on server resources
        #-------------------------------------------------------------------------------
        #-------------------------------------------------------------------------------
        Utility.printArguments([
            "projectCache", "areaOfInterest", "projectFolder",
            "projectServiceName", "ContourBoundFC", "masterService"
        ], [
            projectCache, areaOfInterest, projectFolder, projectServiceName,
            ContourBoundFC, masterService
        ], "C04 ImportContourCacheToMaster")

        # Import cache tiles from a project service into the master service
        #         ts = time.time()
        #         st = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S')
        #         arcpy.AddMessage("Import started at: {0}".format(st))
        a = doTime(
            a, "Ready to start import of '{}' into '{}'".format(
                projectCache, masterService))
        arcpy.ImportMapServerCache_server(
            input_service=masterService,
            source_cache_type="CACHE_DATASET",
            source_cache_dataset=projectCache,
            source_tile_package="",
            upload_data_to_server="DO_NOT_UPLOAD",
            scales=ContourConfig.CONTOUR_SCALES_STRING,
            num_of_caching_service_instances=ContourConfig.CACHE_INSTANCES,
            area_of_interest=areaOfInterest,
            overwrite="OVERWRITE"  # @TODO: Verify this is right
        )

        #         arcpy.ImportMapServerCache_server(input_service="//aiotxftw6na01data/SMB03/elevation/WorkflowManager/arcgis on aiotxftw3gi013.usda.net/Master/Elevation_1M_CONT_2FT.MapServer",
        #                                           source_cache_type="CACHE_DATASET",
        #                                           source_cache_dataset="//aiotxftw6na01data/SMB03/elevation/LiDAR/cache/OK_OK_SugarCreekEric_2008_CONT_2FT/Layers",
        #                                           source_tile_package="",
        #                                           upload_data_to_server="DO_NOT_UPLOAD",
        #                                           scales="9027.977411;4513.988705;2256.994353;1128.497176",
        #                                           num_of_caching_service_instances="6",
        #                                           area_of_interest="//aiotxftw6na01data/sql1/elevation/OK_SugarCreekEric_2008/DERIVED/OK_SugarCreekEric_2008.gdb/BoundaryLASDataset",
        #                                           overwrite="OVERWRITE")
        #         arcpy.ImportMapServerCache_server(input_service="//aiotxftw6na01data/SMB03/elevation/WorkflowManager/arcgis on aiotxftw3gi013.usda.net/Master/Elevation_1M_CONT_2FT.MapServer",
        #                                           source_cache_type="CACHE_DATASET",
        #                                           source_cache_dataset="//aiotxftw6na01data/SMB03/elevation/LiDAR/cache/OK_OK_SugarCreekEric_2008_CONT_2FT/Layers",
        #                                           source_tile_package="",
        #                                           upload_data_to_server="DO_NOT_UPLOAD",
        #                                           scales="9027.977411;4513.988705;2256.994353;1128.497176",
        #                                           num_of_caching_service_instances="6",
        #                                           area_of_interest="//aiotxftw6na01data/sql1/elevation/OK_SugarCreekEric_2008/DERIVED/OK_SugarCreekEric_2008.gdb/BoundaryLASDataset",
        #                                           overwrite="OVERWRITE")
        # NOTE(review): the two doTime calls below both report completion of
        # the same import -- presumably intentional extra logging; confirm.
        a = doTime(
            a, "TWO: Finished import of '{}' into '{}'".format(
                projectCache, masterService))

        #         ts = time.time()
        #         st = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S')
        #         arcpy.AddMessage("Import completed at: {0}".format(st))
        a = doTime(
            a,
            "Import of '{}' into '{}' finished".format(projectCache,
                                                       masterService))

    else:
        a = doTime(a, "Failed to find project for job.")

    doTime(aa, "Operation complete")
Exemple #29
0
def createRasterDatasetStats(f_path, stat_file_path=None):
    """
    Compute statistics for a raster and collect its properties into a dict.

    Attempts to set a default NoData value and (re)calculate statistics on
    the raster, then reads the arcpy.Raster object's properties into a dict
    keyed by the module-level field-name constants (BAND_COUNT, WIDTH, ...).
    When stat_file_path is given, also writes a two-line CSV file (keys on
    the first line, values on the second).

    f_path -- path to the raster dataset
    stat_file_path -- optional output path for the CSV stats file; any
        existing file at that path is overwritten
    Returns the dict of raster properties.
    """
    a = datetime.now()

    try:
        # This NoData value doesn't apply to all rasters; best-effort only,
        # so failures are deliberately ignored and we move on.
        arcpy.SetRasterProperties_management(f_path,
                                             data_type="#",
                                             statistics="#",
                                             stats_file="#",
                                             nodata="1 {}".format(
                                                 RasterConfig.NODATA_DEFAULT))
    except Exception:
        pass

    try:
        arcpy.CalculateStatistics_management(in_raster_dataset=f_path,
                                             x_skip_factor="1",
                                             y_skip_factor="1",
                                             ignore_values="",
                                             skip_existing="OVERWRITE",
                                             area_of_interest="Feature Set")
    except Exception as e:
        arcpy.AddMessage('Could Not Calculate Statistics')
        arcpy.AddMessage(e)

    rasterObject = arcpy.Raster(f_path)
    raster_properties = {
        BAND_COUNT: rasterObject.bandCount,  # number of bands
        COMP_TYPE: rasterObject.compressionType,  # e.g. LZ77, JPEG, LZW, None
        FORMAT: rasterObject.format,  # raster format name
        HAS_RAT: rasterObject.hasRAT,  # True if an attribute table exists
        HEIGHT: rasterObject.height,  # number of rows
        IS_INT: rasterObject.isInteger,  # True for integer pixel types
        IS_TEMP: rasterObject.isTemporary,  # True if dataset is temporary
        MAX: rasterObject.maximum,  # maximum cell value
        MEAN: rasterObject.mean,  # mean cell value
        MEAN_CELL_HEIGHT: rasterObject.meanCellHeight,  # cell size (y)
        MEAN_CELL_WIDTH: rasterObject.meanCellWidth,  # cell size (x)
        MIN: rasterObject.minimum,  # minimum cell value
        NAME: rasterObject.name,
        NODATA_VALUE: rasterObject.noDataValue,
        PATH: rasterObject.path,
        PIXEL_TYPE: rasterObject.pixelType,
        SPAT_REF: rasterObject.spatialReference,
        STAND_DEV: rasterObject.standardDeviation,
        UNCOMP_SIZE: rasterObject.uncompressedSize,  # size on disk
        WIDTH: rasterObject.width,  # number of columns
    }

    # Added to bypass zmin = 'None' error 15 April 2019 BJN: clamp a missing
    # or implausibly low minimum to 0.
    if raster_properties[MIN] is None or raster_properties[MIN] < -285:
        raster_properties[MIN] = 0

    # Vertical/horizontal coordinate-system info, filled in only when a
    # spatial reference is available.
    raster_properties[V_NAME] = None
    raster_properties[V_UNIT] = None
    raster_properties[H_NAME] = None
    raster_properties[H_UNIT] = None
    raster_properties[H_WKID] = None

    if rasterObject.spatialReference is not None:
        raster_properties[V_NAME], raster_properties[
            V_UNIT] = Utility.getVertCSInfo(rasterObject.spatialReference)
        raster_properties[H_NAME] = rasterObject.spatialReference.name
        raster_properties[
            H_UNIT] = rasterObject.spatialReference.linearUnitName
        raster_properties[H_WKID] = rasterObject.spatialReference.factoryCode

    raster_properties[XMIN] = rasterObject.extent.XMin
    raster_properties[YMIN] = rasterObject.extent.YMin
    raster_properties[XMAX] = rasterObject.extent.XMax
    raster_properties[YMAX] = rasterObject.extent.YMax

    # Build the CSV header and value rows in KEY_LIST order.
    keyList = ','.join(KEY_LIST)
    valList = ','.join(str(raster_properties[key]) for key in KEY_LIST)

    if stat_file_path is not None:
        # Overwrite any existing stats file; 'with' guarantees the file is
        # closed even if the write fails.
        deleteFileIfExists(stat_file_path)
        with open(stat_file_path, 'w') as out_file:
            out_file.write('{}\n{}\n'.format(keyList, valList))

    doTime(a, "\tCreated STATS {}".format(stat_file_path))
    doTime(a, "\tCreated STATS {}".format(raster_properties))

    return raster_properties
Exemple #30
0
def createVectorBoundaryC(f_path, f_name, raster_props, stat_out_folder, vector_bound_path, minZ, maxZ, bound_path, elev_type):
    """
    Create an attributed, clipped boundary polygon for the raster at f_path.

    Pipeline (each step writes an intermediate B5..B1 shapefile in
    stat_out_folder, all removed at the end):
      raster domain -> singlepart -> eliminate contained holes ->
      simplify -> dissolve -> add/calculate attribute fields ->
      clip to bound_path -> vector_bound_path.

    f_path -- source raster path
    f_name -- base name for the intermediate shapefiles
    raster_props -- dict of raster properties (keyed by KEY_LIST constants)
        copied into attribute fields on the output
    stat_out_folder -- folder receiving the intermediate shapefiles
    vector_bound_path -- output boundary feature class
    minZ, maxZ -- elevation range, used only in the log message
    bound_path -- clip boundary feature class
    elev_type -- elevation type label stored on the output
    """
    a = datetime.now()
    arcpy.AddMessage("\tCreating {} bound for '{}' using min z '{}' and max z'{}'".format(elev_type, f_path, minZ, maxZ))

    # Intermediate outputs, numbered in reverse order of creation (B5 first).
    vector_1_bound_path = os.path.join(stat_out_folder, "B1_{}.shp".format(f_name))
    vector_2_bound_path = os.path.join(stat_out_folder, "B2_{}.shp".format(f_name))
    vector_3_bound_path = os.path.join(stat_out_folder, "B3_{}.shp".format(f_name))
    vector_4_bound_path = os.path.join(stat_out_folder, "B4_{}.shp".format(f_name))
    vector_5_bound_path = os.path.join(stat_out_folder, "B5_{}.shp".format(f_name))
    # Start from a clean slate: remove the final output and any leftovers.
    for stale_path in (vector_bound_path, vector_1_bound_path, vector_2_bound_path, vector_3_bound_path, vector_4_bound_path, vector_5_bound_path):
        deleteFileIfExists(stale_path, useArcpy=True)

    # B5: polygon footprint of the raster's data extent.
    arcpy.RasterDomain_3d(in_raster=f_path, out_feature_class=vector_5_bound_path, out_geometry_type="POLYGON")
    Utility.addToolMessages()

    # B4: explode multipart footprint into singlepart features.
    arcpy.MultipartToSinglepart_management(in_features=vector_5_bound_path, out_feature_class=vector_4_bound_path)
    Utility.addToolMessages()
    checkRecordCount(vector_4_bound_path)

    # B3: drop contained holes smaller than the area threshold.
    arcpy.EliminatePolygonPart_management(in_features=vector_4_bound_path, out_feature_class=vector_3_bound_path, condition="AREA", part_area="10000 SquareMiles", part_area_percent="0", part_option="CONTAINED_ONLY")
    Utility.addToolMessages()
    checkRecordCount(vector_3_bound_path)

    # B2: simplify the outline to reduce vertex count.
    arcpy.SimplifyPolygon_cartography(
        in_features=vector_3_bound_path,
        out_feature_class=vector_2_bound_path,
        algorithm="POINT_REMOVE",
        tolerance="{} Meters".format(C_SIMPLE_DIST),
        minimum_area="0 Unknown",
        error_option="RESOLVE_ERRORS",
        collapsed_point_option="NO_KEEP",
        in_barriers=""
        )
    Utility.addToolMessages()
    checkRecordCount(vector_2_bound_path)

    arcpy.AddMessage('ZFlag: ' + arcpy.env.outputZFlag)
    arcpy.AddMessage('MFlag: ' + arcpy.env.outputMFlag)

    # B1: dissolve everything into a single multipart boundary.
    arcpy.Dissolve_management(in_features=vector_2_bound_path, out_feature_class=vector_1_bound_path, dissolve_field="", statistics_fields="", multi_part="MULTI_PART", unsplit_lines="DISSOLVE_LINES")
    Utility.addToolMessages()
    checkRecordCount(vector_1_bound_path)

    deleteFields(vector_1_bound_path)

    record_count = checkRecordCount(vector_1_bound_path)
    footprint_area = 0
    # After the MULTI_PART dissolve there is normally one feature, so the
    # last row's area is the total. NOTE(review): if multiple rows ever
    # result, only the last row's area is kept -- confirm intent.
    for row in arcpy.da.SearchCursor(vector_1_bound_path, ["SHAPE@"]):  # @UndefinedVariable
        shape = row[0]
        footprint_area = shape.getArea("PRESERVE_SHAPE", "SQUAREMETERS")

    if footprint_area <= 0:
        arcpy.AddMessage("\tWARNING: Area is 0 in {} '{}' bound '{}'".format(elev_type, f_path, vector_bound_path))

    # Bookkeeping fields (path, name, area, elevation type, z range).
    for key in (PATH, NAME, AREA, ELEV_TYPE, RANGE):
        addField(in_table=vector_1_bound_path, field_name=FIELD_INFO[key][0], field_alias=FIELD_INFO[key][1], field_type=FIELD_INFO[key][2], field_length=FIELD_INFO[key][3])

    deleteFields(vector_1_bound_path)

    arcpy.AddMessage(raster_props)
    # Copy each raster property into its own attribute field.
    for field_name in KEY_LIST:
        # Brief pause between schema edits; presumably avoids shapefile
        # locking issues with arcpy -- TODO confirm.
        time.sleep(0.25)
        field_shpname = FIELD_INFO[field_name][0]
        field_alias = FIELD_INFO[field_name][1]
        field_type = FIELD_INFO[field_name][2]
        field_length = FIELD_INFO[field_name][3]
        field_value = raster_props[field_name]
        if field_type == "TEXT":
            # A trailing backslash would escape the closing quote of the
            # field-calculator expression, so strip it before quoting.
            if str(field_value).endswith('\\'):
                field_value = str(field_value)[0:-1]
            field_value = r'"{}"'.format(field_value)

        addField(in_table=vector_1_bound_path, field_name=field_shpname, field_alias=field_alias, field_type=field_type, field_length=field_length, expression=field_value)

    b_f_path, b_f_name = os.path.split(f_path)
    b_f_name = os.path.splitext(b_f_name)[0]
    arcpy.CalculateField_management(in_table=vector_1_bound_path, field=FIELD_INFO[PATH][0], expression='"{}"'.format(b_f_path), expression_type="PYTHON_9.3")
    arcpy.CalculateField_management(in_table=vector_1_bound_path, field=FIELD_INFO[NAME][0], expression='"{}"'.format(b_f_name), expression_type="PYTHON_9.3")
    arcpy.CalculateField_management(in_table=vector_1_bound_path, field=FIELD_INFO[AREA][0], expression=footprint_area, expression_type="PYTHON_9.3")
    arcpy.CalculateField_management(in_table=vector_1_bound_path, field=FIELD_INFO[ELEV_TYPE][0], expression='"{}"'.format(elev_type), expression_type="PYTHON_9.3")
    try:
        # Z range = MAX - MIN; best-effort, as the fields may be missing or
        # null for some rasters.
        z_expr = "!{}! - !{}!".format(FIELD_INFO[MAX][0], FIELD_INFO[MIN][0])
        arcpy.CalculateField_management(in_table=vector_1_bound_path, field=FIELD_INFO[RANGE][0], expression=z_expr, expression_type="PYTHON_9.3")
    except Exception:
        pass

    # Final output: boundary clipped to the supplied project boundary.
    deleteFileIfExists(vector_bound_path, True)
    arcpy.Clip_analysis(in_features=vector_1_bound_path, clip_features=bound_path, out_feature_class=vector_bound_path, cluster_tolerance="")
    Utility.addToolMessages()
    checkRecordCount(vector_bound_path)

    deleteFields(vector_bound_path)

    # Clean up all intermediate shapefiles.
    for tmp_path in (vector_1_bound_path, vector_2_bound_path, vector_3_bound_path, vector_4_bound_path, vector_5_bound_path):
        deleteFileIfExists(tmp_path, useArcpy=True)

    doTime(a, "\tCreated BOUND {}".format(vector_bound_path))