Code Example #1
def process():
    # TODO: Add user error reporting to alert the user of issues accessing their bucket.
    # Begin Script
    class LicenseError(Exception):
        pass

    try:
        if CheckExtension("ImageAnalyst") == "Available":
            CheckOutExtension("ImageAnalyst")
        else:
            # raise a custom exception
            raise LicenseError
        try:
            bucket_url = 'https://s3.{0}.amazonaws.com/{1}/'.format(region, bucket_name)
            f_list = []
            fp_list = []
            '''
            conn = client('s3')  # again assumes boto.cfg setup, assume AWS S3
            
            for key in conn.list_objects(Bucket=bucket_name)['Contents']:
                if not key['Key'].endswith('/') and list_folders is False:
                    f_list.append(key['Key'])
                    fp_list.append(bucket_url + key['Key'])
                if list_folders is True:
                    f_list.append(key['Key'])
                    fp_list.append(bucket_url + key['Key'])
            '''
            s3r = resource('s3')
            bucket_list = [item.key for item in list(s3r.Bucket(bucket_name).objects.all())]
            for key in bucket_list:
                if not key.endswith('/') and list_folders is False:
                    f_list.append(key)
                    fp_list.append(bucket_url + key)
                if list_folders is True:
                    f_list.append(key)
                    fp_list.append(bucket_url + key)


            # Create a Pandas dataframe from the data.
            df = pd.DataFrame({'bucket_url': bucket_url, 'key': f_list, 'full_path': fp_list})

            with pd.ExcelWriter(out_spreadsheet) as writer:
                df.to_excel(writer)
        except NoCredentialsError:
            err_str = 'Boto3 credentials are not set. See the following instructions: ' \
                      'https://boto3.amazonaws.com/v1/documentation/api/latest/guide/quickstart.html#configuration'
            AddError(err_str)
            raise ValueError(err_str)
        except s3_client.exceptions.NoSuchBucket:  # s3_client: a boto3 S3 client assumed defined elsewhere
            AddError('AWS bucket %s does not exist' % bucket_name)
            raise ValueError('AWS bucket %s does not exist' % bucket_name)
        # Check back in Image Analyst license
        CheckInExtension("ImageAnalyst")
    except LicenseError:
        AddError("ImageAnalyst license is unavailable")
        print("ImageAnalyst license is unavailable")
    except ExecuteError:
        AddError(GetMessages(2))
        print(GetMessages(2))
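Code Example #1 references imports and module-level globals that are not shown. A minimal setup sketch for running it; every name and value below is an assumption inferred from the function body, not part of the original script:

# Hypothetical setup for Code Example #1 -- values are placeholder assumptions.
from arcpy import (CheckExtension, CheckOutExtension, CheckInExtension,
                   AddError, ExecuteError, GetMessages)
from boto3 import client, resource
from botocore.exceptions import NoCredentialsError
import pandas as pd

s3_client = client('s3')                 # needed by the NoSuchBucket handler
region = 'us-east-1'                     # assumed AWS region
bucket_name = 'my-bucket'                # assumed bucket name
list_folders = False                     # include folder keys in the listing?
out_spreadsheet = r'C:\temp\bucket_listing.xlsx'

process()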
Code Example #2
def checkout_arcgis_extension(extension):
    """"""

    if CheckExtension(extension) == 'Available':
        CheckOutExtension(extension)
    else:
        print "the '{}' extension is unavailable so the script can't run " \
              "successfully, if you have ArcGIS Desktop open close it, as " \
              "it may be utilizing the license, otherwise check the " \
              "license server log to determine who has it checked out"
        exit()
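A short usage sketch for checkout_arcgis_extension; the extension code and the surrounding workflow are assumptions:

# Hypothetical usage of checkout_arcgis_extension.
checkout_arcgis_extension('Spatial')     # exits the script if unavailable
# ... run extension-dependent geoprocessing here ...
from arcpy import CheckInExtension
CheckInExtension('Spatial')              # return the license when done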
Code Example #3
def calculate_network_locations(points, network):
    """
  Computes the locations of |points| in |network|.
  |points|: a feature class (points or polygons).
  |network|: a network dataset.
  """
    CheckOutExtension("Network")
    CalculateLocations_na(in_point_features=points,
                          in_network_dataset=network,
                          search_tolerance="5000 Meters",
                          search_criteria=("%s SHAPE; %s SHAPE;" %
                                           network_features(network)),
                          exclude_restricted_elements="INCLUDE")
    CheckInExtension("Network")
def roadsNonMonoCheck():
    try:
        # Check out license
        print('The result of CheckExtension("Highways") is ' + str(CheckExtension("Highways")) + '.')
        if CheckExtension("Highways") == 'Available':
            CheckOutExtension("Highways")
            
            # Do the license check before the deletion, so that you don't
            # remove data and then not put it back in the case that the
            # license is not available.
            from arcpy import DetectNonMonotonicRoutes_locref
            
            if Exists(nonMonotonicOutputGDB):
                try:
                    Delete_management(nonMonotonicOutputGDB)
                except:
                    pass
            
            nonMonotonicOutputGDBName = returnGDBOrSDEName(nonMonotonicOutputGDB)
            
            CreateFileGDB_management(mainFolder, nonMonotonicOutputGDBName)
            time.sleep(1)
            
            DetectNonMonotonicRoutes_locref(networkToReview, nonMonotonicOutputFC, "Any", "F_Date", "T_Date", "SourceRouteId")
            
            print("The Roads & Highways Non-Monotonic routes check for " + str(networkToReview) + " has completed.\n")
            
        else:
            print('The Roads & Highways extension is not currently available.')
            print('Skipping R&H Non-Monotonicity check.')
        
    except Exception as Exception1:
        # If an error occurred, print line number and error message
        import traceback, sys
        tb = sys.exc_info()[2]
        print "Line %i" % tb.tb_lineno
        print Exception1.message
        try:
            del Exception1
        except:
            pass
    finally:
        try:
            # Check the license back in
            CheckInExtension("Highways")
        except:
            pass
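roadsNonMonoCheck uses try/finally to guarantee the Highways license is returned even on failure. The same guarantee can be factored into a reusable context manager; a sketch assuming only the documented arcpy license functions:

# Sketch: check an extension out on entry and always back in on exit.
from contextlib import contextmanager
from arcpy import CheckExtension, CheckOutExtension, CheckInExtension

@contextmanager
def arcpy_extension(extension):
    if CheckExtension(extension) != 'Available':
        raise RuntimeError("'{}' extension is unavailable".format(extension))
    CheckOutExtension(extension)
    try:
        yield
    finally:
        CheckInExtension(extension)

# Usage sketch:
# with arcpy_extension('Highways'):
#     ...run Roads & Highways tools...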
Code Example #5
    def arcpy_schema_compare(self, base, output_folder_location, test):
        self.queue.put("Importing ArcPy\n\n")
        from arcpy import CheckOutExtension
        from arcpy import CheckInExtension
        from arcpy import env
        CheckOutExtension('Datareviewer')
        self.queue.put("Checking Out Datareviewer Extension\n\n")
        from arcpy import GeodatabaseSchemaCompare_Reviewer
        env.overwriteOutput = True
        self.queue.put('Arcpy and Datareviewer checked out\n\n')
        result = GeodatabaseSchemaCompare_Reviewer(
            base, test, output_folder_location
        )  # THIS IS CAUSING PYTHON TO CRASH ON RE_RUN
        self.queue.put(result.getMessages())
        CheckInExtension('Datareviewer')
        del result
        self.queue.put("\n\nChecking In Datareviewer Extension\n\n")
Code Example #6
def main():
    from arcpy import CheckExtension, CheckOutExtension, CheckInExtension, ExecuteError, GetMessages

    class LicenseError(Exception):
        pass

    try:
        if CheckExtension("ImageAnalyst") == "Available":
            CheckOutExtension("ImageAnalyst")
        else:
            # raise a custom exception
            raise LicenseError
        subset_image_for_texture(in_image, in_polygon, area, out_image)
        CheckInExtension("ImageAnalyst")
    except LicenseError:
        print("Image Analyst license is unavailable")
    except ExecuteError:
        print(GetMessages(2))
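Examples #6 through #9 repeat the same Image Analyst boilerplate around a single call. A hedged sketch of a decorator that factors the pattern out; the helper name is hypothetical:

# Hypothetical helper: run any function under an Image Analyst checkout.
from functools import wraps
from arcpy import (CheckExtension, CheckOutExtension, CheckInExtension,
                   ExecuteError, GetMessages)

def with_image_analyst(func):
    @wraps(func)
    def wrapper(*args, **kwargs):
        if CheckExtension("ImageAnalyst") != "Available":
            print("Image Analyst license is unavailable")
            return None
        CheckOutExtension("ImageAnalyst")
        try:
            return func(*args, **kwargs)
        except ExecuteError:
            print(GetMessages(2))
        finally:
            CheckInExtension("ImageAnalyst")
    return wrapper

# Usage sketch:
# run_subset = with_image_analyst(subset_image_for_texture)
# run_subset(in_image, in_polygon, area, out_image)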
Code Example #7
def main():
    from arcpy import CheckExtension, CheckOutExtension, CheckInExtension, ExecuteError, GetMessages, AddError
    from os import path, makedirs  # used below (path.dirname, path.exists, makedirs)

    class LicenseError(Exception):
        pass

    try:
        if CheckExtension("ImageAnalyst") == "Available":
            CheckOutExtension("ImageAnalyst")
        else:
            # raise a custom exception
            raise LicenseError
        try:
            from PIL import Image
        except ModuleNotFoundError:
            AddError(
                "PILLOW Library Not Detected. Install using Python Manager in ArcGIS Pro"
            )
            print(
                "PILLOW Library Not Detected. Install using Python Manager in ArcGIS Pro"
            )
            exit()
        i_list, extent = get_images_and_stats(
            in_mosaic
        )  # Obtain image statistics and info from mosaic for processing
        for i in i_list:  # Check that output folder is not the path of i
            if out_folder == path.dirname(i[0]):
                AddError(
                    "outFolder cannot be the same folder/directory as images referenced in the mosaic dataset"
                )
                exit()
        if not path.exists(out_folder):
            makedirs(out_folder)
        texture_images(i_list, extent, in_texture, in_polygon, out_folder,
                       method, blur_distance)  # Generate Texture-Masked tiles

        CheckInExtension("ImageAnalyst")
    except LicenseError:
        AddError("Image Analyst license is unavailable")
        print("Image Analyst license is unavailable")
    except ExecuteError:
        print(GetMessages(2))
Code Example #8
def main():
    from arcpy import ExecuteError, GetMessages, CheckOutExtension, CheckExtension, CheckInExtension

    class LicenseError(Exception):
        pass

    try:
        if CheckExtension("ImageAnalyst") == "Available":
            CheckOutExtension("ImageAnalyst")
        else:
            # raise a custom exception
            raise LicenseError

        seamless_texture(inImg, outImg)
        CheckInExtension("ImageAnalyst")
    except LicenseError:
        print("Image Analyst license is unavailable")
    except ExecuteError:
        print(GetMessages(2))
Code Example #9
def main():
    from arcpy import CheckExtension, CheckOutExtension, CheckInExtension, ExecuteError, GetMessages

    class LicenseError(Exception):
        pass

    try:
        if CheckExtension("ImageAnalyst") == "Available":
            CheckOutExtension("ImageAnalyst")
        else:
            # raise a custom exception
            raise LicenseError

        create_mask(in_raster, in_polygon, out_raster)
        CheckInExtension("ImageAnalyst")
    except LicenseError:
        print("Image Analyst license is unavailable")
    except ExecuteError:
        print(GetMessages(2))
Code Example #10
def las_tiles_to_numpy_pandas(in_lidar_folder, sr, lidar_format, returns,
                              class_codes, format_for_library):
    # Assumed imports (not shown in the original snippet):
    from arcpy import (CheckExtension, CheckOutExtension, CheckInExtension,
                       AddMessage, AddError, ExecuteError, GetMessages)
    from os import listdir
    from os.path import join
    class LicenseError(Exception):
        pass

    try:
        if CheckExtension("3D") == "Available":
            CheckOutExtension("3D")
        else:
            # raise a custom exception
            raise LicenseError

        if not lidar_format.startswith(
                "."):  # Ensure the lidar format string has a leading dot
            lidar_format = ".{}".format(lidar_format)
        supported_lidar_formats = [".las", ".zlas"]
        assert lidar_format in supported_lidar_formats, \
            "LiDAR format {0} unsupported. Ensure LiDAR format is in {1}".format(lidar_format, supported_lidar_formats)

        lidar_tiles = [
            f for f in listdir(in_lidar_folder)
            if f.endswith("{}".format(lidar_format))
        ]
        if len(lidar_tiles) < 1:
            AddError("No LiDAR tiles detected in input directory")
        count = 0
        for tile in lidar_tiles:
            AddMessage("processing lidar tile {0} of {1} : {2}".format(
                count + 1, len(lidar_tiles), tile))
            lidar_tile = join(in_lidar_folder, tile)
            las_tile_to_numpy_pandas(lidar_tile, sr, returns, class_codes,
                                     format_for_library)
            count += 1
        AddMessage("processing {} lidar tiles complete".format(count))

        # Check back in 3D Analyst license
        CheckInExtension("3D")
    except LicenseError:
        AddError("3D Analyst license is unavailable")
    except ExecuteError:
        AddError(GetMessages(2))
        print(GetMessages(2))
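A hedged invocation sketch for las_tiles_to_numpy_pandas; every argument value below is a placeholder assumption:

# Hypothetical call -- paths, spatial reference, and codes are placeholders.
las_tiles_to_numpy_pandas(
    in_lidar_folder=r'C:\data\lidar_tiles',
    sr=6348,                        # assumed spatial reference WKID
    lidar_format='las',             # a leading '.' is added automatically
    returns=['LAST'],               # assumed return-number filter
    class_codes=[2],                # 2 = ground in the LAS classification spec
    format_for_library='pandas')    # assumed target-library flag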
Code Example #11
    def initGIS(self):
        """
        Factory method for setting up required ArcPy processing
        """

        # GIS globals
        env.overwriteOutput = True
        env.extent = "MINOF"
        CheckOutExtension("Spatial")

        self.output_location = os.path.join(os.getcwd(), 'tkinter_output')
        print(self.output_location)
        if not os.path.isdir(self.output_location):
            os.mkdir(self.output_location)

        env.workspace = self.output_location

        self.tracts_file = os.path.join(self.output_location,
                                        "cancer_tracts.shp")
        self.mxd = mapping.MapDocument(
            os.path.join(self.output_location, "cancer_data.mxd"))
Code Example #12
def main():
    from arcpy import CheckExtension, CheckOutExtension, CheckInExtension, ExecuteError, GetMessages, AddMessage
    from arcpy.management import BuildPyramids

    class LicenseError(Exception):
        pass

    try:
        if CheckExtension("ImageAnalyst") == "Available":
            CheckOutExtension("ImageAnalyst")
        else:
            # raise a custom exception
            raise LicenseError
        try:
            from PIL import Image
        except ModuleNotFoundError:
            from arcpy import AddError
            AddError(
                "PILLOW Library Not Detected. Install using Python Manager in ArcGIS Pro"
            )
            print(
                "PILLOW Library Not Detected. Install using Python Manager in ArcGIS Pro"
            )
            exit()

        mask_image(in_image, in_mask, in_texture, out_image, method,
                   blur_distance)
        AddMessage("Building Pyramids")
        BuildPyramids(out_image, -1, "NONE", "NEAREST", "DEFAULT", 75,
                      "OVERWRITE")
        CheckInExtension("ImageAnalyst")

    except LicenseError:
        print("Image Analyst license is unavailable")
    except ExecuteError:
        print(GetMessages(2))
Code Example #13
def main():
    from arcpy import CheckExtension, CheckOutExtension, CheckInExtension, ExecuteError, GetMessages, AddError

    class LicenseError(Exception):
        pass

    try:
        if CheckExtension("ImageAnalyst") == "Available":
            CheckOutExtension("ImageAnalyst")
        else:
            # raise a custom exception
            raise LicenseError

        batch_create_tiled_ortho_mosaics(in_folder, image_format, num_bands,
                                         pixel_depth, product_definition,
                                         product_band_definitions, pixel_size,
                                         out_folder)

        CheckInExtension("ImageAnalyst")
    except LicenseError:
        AddError("Image Analyst license is unavailable")
        print("Image Analyst license is unavailable")
    except ExecuteError:
        print(GetMessages(2))
Code Example #14
def process():
    # Assumed imports (not shown in the original snippet); in_fc, file_names,
    # in_xlsx, xlsx_row_name, in_lidar_format and out_fc are globals defined elsewhere.
    from arcpy import (CheckExtension, CheckOutExtension, CheckInExtension,
                       AddError, ExecuteError, GetMessages, da)
    from arcpy.management import CopyFeatures, AddField, Delete
    from os.path import join, split
    from pathlib import Path
    import pandas as pd
    class LicenseError(Exception):
        pass

    try:
        if CheckExtension("ImageAnalyst") == "Available":
            CheckOutExtension("ImageAnalyst")
        else:
            # raise a custom exception
            raise LicenseError

        # System Parameters
        tile_name = "FileName"

        # Begin Script
        temp_fc = join("in_memory", "temp_fc")
        CopyFeatures(in_fc, temp_fc)
        for f in file_names:
            AddField(temp_fc, f, "TEXT")

        df = pd.read_excel(in_xlsx, index_col=0)

        def attribute_tile(in_feature_class,
                           in_tile_name,
                           in_df,
                           in_name,
                           xlsx_row_name=xlsx_row_name):
            with da.UpdateCursor(in_feature_class,
                                 [in_tile_name, in_name]) as cursor:
                for fc_r in cursor:
                    for df_i, df_r in in_df.iterrows():
                        url = df_r[xlsx_row_name]
                        n = Path(url).stem
                        t_name = fc_r[0]
                        t_n = Path(t_name).stem
                        if n.startswith(in_name) and t_n in n:
                            fc_r[1] = url
                    cursor.updateRow(fc_r)

        # Attribute the LiDAR Derivatives
        for n in file_names:
            attribute_tile(temp_fc, tile_name, df, n)

        def attribute_tile_lidar(in_feature_class,
                                 in_tile_name,
                                 in_df,
                                 in_name,
                                 xlsx_row_name=xlsx_row_name):
            with da.UpdateCursor(in_feature_class,
                                 [in_tile_name, in_name]) as cursor:
                for fc_r in cursor:
                    for df_i, df_r in in_df.iterrows():
                        url = df_r[xlsx_row_name]
                        n = split(url)[1]
                        t_name = fc_r[0]
                        if n == t_name:
                            fc_r[1] = url
                    cursor.updateRow(fc_r)

        # Attribute the LiDAR tile now
        AddField(temp_fc, in_lidar_format, "TEXT")
        attribute_tile_lidar(temp_fc,
                             tile_name,
                             df,
                             in_lidar_format,
                             xlsx_row_name=xlsx_row_name)
        '''
        # Print Fields for debugging/assessing results of above operations
        file_names.append(in_lidar_format)
        print(file_names)
        with da.SearchCursor(temp_fc, file_names) as cursor:
            for fc_r in cursor:
                print(fc_r)
        '''

        # Delete Pandas Dataframe from Memory
        del df

        # Copy in_memory temporary feature class to output location
        CopyFeatures(temp_fc, out_fc)

        # Delete Temporary Feature Class
        Delete(temp_fc)

        # Check back in Image Analyst license
        CheckInExtension("ImageAnalyst")
    except LicenseError:
        AddError("ImageAnalyst license is unavailable")
        print("ImageAnalyst license is unavailable")
    except ExecuteError:
        AddError(GetMessages(2))
        print(GetMessages(2))
Code Example #15
def process():
    # Assumed imports (not shown in the original snippet); in_directory and
    # out_directory are globals defined elsewhere.
    from arcpy import (CheckExtension, CheckOutExtension, CheckInExtension,
                       AddError, ExecuteError, GetMessages)
    import os
    import zipfile
    class LicenseError(Exception):
        pass

    try:
        if CheckExtension("ImageAnalyst") == "Available":
            CheckOutExtension("ImageAnalyst")
        else:
            # raise a custom exception
            raise LicenseError

        # Constants - DO NOT MODIFY
        supported_folder_extensions = [
            'gdb'
        ]  # Currently not using this.... for future needs...

        def ensure_dir(file_path):
            directory = os.path.dirname(file_path)
            if not os.path.exists(directory):
                os.makedirs(directory)

        def zipper(in_list, out_file_path):
            out_file = '{0}.zip'.format(out_file_path)
            ensure_dir(out_file)
            with zipfile.ZipFile(out_file, 'w') as zipMe:
                for f in in_list:
                    zipMe.write(f, compress_type=zipfile.ZIP_DEFLATED)

        def zipper_gdb(in_gdb, out_file_name):
            assert in_gdb.endswith(
                '.gdb'
            ), "Error: file extension {0} not detected in in_gdb".format(
                ".gdb")
            root_dir = os.path.dirname(in_gdb)
            gdb_name = os.path.basename(in_gdb)
            myzip = zipfile.ZipFile(os.path.join(root_dir, out_file_name), 'w',
                                    zipfile.ZIP_DEFLATED)
            for folder, subfolder, file in os.walk(
                    os.path.join(root_dir, gdb_name)):
                for each in subfolder + file:
                    source = os.path.join(folder, each)
                    if not source.endswith(".lock"):
                        # remove the absolute path to compose arcname
                        # also handles the remaining leading path separator with lstrip
                        arcname = source[len(root_dir):].lstrip(os.sep)
                        # write the file under a different name in the archive
                        myzip.write(source, arcname=arcname)
            myzip.close()

        def zip_folder(in_folder, out_file_name):
            myzip = zipfile.ZipFile(os.path.join(in_folder, out_file_name),
                                    'w', zipfile.ZIP_DEFLATED)
            for folder, subfolder, file in os.walk(in_folder):
                for each in subfolder + file:
                    source = os.path.join(folder, each)
                    # remove the absolute path to compose arcname
                    # also handles the remaining leading path separator with lstrip
                    arcname = source[len(in_folder):].lstrip(os.sep)
                    # write the file under a different name in the archive
                    myzip.write(source, arcname=arcname)
            myzip.close()

        # TODO: do something with folder-based file structures, e.g. GDBs, using the zipper_gdb()/zip_folder() functions above.
        from arcpy import AddMessage

        files_in_dir = []
        for root, dirs, files in os.walk(in_directory):
            for filename in files:
                files_in_dir.append([root, filename])

        file_name_list = []
        files_to_zip = []
        for f in files_in_dir:
            root = f[0]
            filename = f[1]
            file = os.path.join(root, filename)
            file_partitioned = filename.partition('.')[0]
            if file_partitioned not in file_name_list:
                if len(files_to_zip) > 1:
                    out_file_path = files_to_zip[0].replace(
                        in_directory, out_directory).partition('.')[0]
                    zipper(files_to_zip, out_file_path)
                    AddMessage(files_to_zip)
                    files_to_zip = []
                file_name_list.append(file_partitioned)
            else:
                files_to_zip.append(file)
                # If last file in directory for processing
                if root == files_in_dir[-1][0] and filename == files_in_dir[
                        -1][1]:
                    out_file_path = files_to_zip[0].replace(
                        in_directory, out_directory).partition('.')[0]
                    zipper(files_to_zip, out_file_path)

        # Check back in Image Analyst license
        CheckInExtension("ImageAnalyst")
    except LicenseError:
        AddError("ImageAnalyst license is unavailable")
        print("ImageAnalyst license is unavailable")
    except ExecuteError:
        AddError(GetMessages(2))
        print(GetMessages(2))
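The zipper_gdb helper above skips .lock files so an open file geodatabase does not poison the archive; a hedged usage sketch with placeholder paths:

# Hypothetical usage -- paths are placeholders.
zipper_gdb(r'C:\data\project.gdb', 'project_gdb.zip')
# Writes C:\data\project_gdb.zip containing project.gdb, minus any .lock files.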
Code Example #16
def reviewData():
    try:
        print("Starting the Data Reviewer batch job at:\n" +
              str(reviewerBatchJob) + ".")
        print("For the data located in:\n" + str(workspaceToReview) + ".")
        print(
            "If one of the feature classes, Routes or CalPts, does not exist in the place that the"
        )
        print(
            "data reviewer batch job looks for it, then you will get an 'Unknown Error'."
        )
        print(
            "This can be remedied by updating the data reviewer batch job's workspace settings."
        )
        # Test the data reviewer part:
        if CheckExtension("datareviewer") == 'Available':
            print("Extension availability check complete.")
            CheckOutExtension("datareviewer")

            # Checking to see if the output already exists.
            # If so, remove it so that it can be recreated. -- For the errors, might need a better process, so that
            # it's possible to track where the errors were at the start and how things progressed.
            if Exists(reviewerSessionGDB):
                Delete_management(reviewerSessionGDB)

            # Create new geodatabase
            # Replace with returnGDBOrSDEPath(reviewerSessionGDB), returnGDBOrSDEName(reviewerSessionGDB)
            # or similar functions
            CreateFileGDB_management(reviewerSessionGDBFolder,
                                     returnGDBOrSDEName(reviewerSessionGDB))

            # Execute EnableDataReviewer
            EnableDataReviewer_Reviewer(reviewerSessionGDB, "#", "#",
                                        "DEFAULTS")

            # Create a new Reviewer session
            ##CreateReviewerSession_Reviewer (reviewer_workspace, session_name, {session_template}, {duplicate_checking}, {store_geometry}, {username}, {version})
            CreateReviewerSession_Reviewer(reviewerSessionGDB, reviewerSession,
                                           "", "NONE", "STORE_GEOMETRY")

            # execute the batch job
            batchJobResult = ExecuteReviewerBatchJob_Reviewer(
                reviewerSessionGDB, sessionReviewerSession, reviewerBatchJob,
                workspaceToReview)

            print("Data Reviewer batch job complete.")

            # get the output table view from the result object
            outputTable = batchJobResult.getOutput(0)

            print("The output table is called " + str(outputTable.name) + "."
                  )  # prints REVBATCHRUNTABLE

            CheckInExtension("datareviewer")

        else:
            print(
                "The 'datareviewer' extension is not available. Skipping checks."
            )

    except Exception as Exception1:
        # If an error occurred, print line number and error message
        import traceback, sys
        tb = sys.exc_info()[2]
        print "Line %i" % tb.tb_lineno
        print Exception1.message
        try:
            del Exception1
        except:
            pass
    finally:
        CheckInExtension("datareviewer")
Code Example #17
"""-------------------------------------------------------------------------------
Name:         Urban Water Analysis
              Conventional analysis
Purpose:      find buildings covered by distance and elevation
Project:      language for spatial computing
Author:       ICRC (2017), adapted by Selina Studer
License:      Apache License 2.0
Created:      26.12.2018
Libraries:    arcpy
-------------------------------------------------------------------------------"""

from arcpy import CheckOutExtension, env, SelectLayerByLocation_management, FeatureToPoint_management, CopyFeatures_management, JoinField_management, SelectLayerByAttribute_management
from arcpy.sa import ExtractValuesToPoints
from arcpy.da import SearchCursor

env.overwriteOutput = True
CheckOutExtension("Spatial")

# load input data
area = 'C:/area.shp'
dem = 'C:/dem.tif'
waterPoint = 'C:/waterPoints.shp'
building = 'C:/buildings.shp'

# set parameters
distance = 50
elevation = 3

# select water points within area
waterPoint_inArea = SelectLayerByLocation_management(waterPoint, 'INTERSECT',
                                                     area)
Code Example #18
def process():
    # Assumed imports (not shown in the original snippet); in_buildings, in_tiles,
    # building_fid, tile_fid and file_name are globals defined elsewhere.
    from arcpy import (CheckExtension, CheckOutExtension, CheckInExtension,
                       AddError, ExecuteError, GetMessages, da, ddd)
    from arcpy.management import AddField, DeleteField, Delete, Sort
    from arcpy.analysis import Intersect
    from collections import Counter
    from os.path import join
    class LicenseError(Exception):
        pass

    try:
        if CheckExtension("3D") == "Available":
            CheckOutExtension("3D")
        else:
            # raise a custom exception
            raise LicenseError

        # Constants - DO NOT MODIFY
        split_area = "split_area"
        orig_area = "orig_area"

        def calc_area(in_fc, field_name):
            AddField(in_fc, field_name, "DOUBLE")
            with da.UpdateCursor(in_fc, [field_name, "SHAPE@AREA"]) as cursor1:
                for r1 in cursor1:
                    r1[0] = r1[1]
                    cursor1.updateRow(r1)

        def field_exists(in_fc, in_field):
            from arcpy import ListFields
            if in_field in [f.name for f in ListFields(in_fc)]:
                return True
            else:
                return False

        def delete_field_if_exists(in_fc, in_field):
            if field_exists(in_fc, in_field):
                DeleteField(in_fc, in_field)

        assert field_exists(in_buildings, building_fid), \
            "no attribute named {} in feature class".format(building_fid)

        for field in [tile_fid, file_name]:
            delete_field_if_exists(in_buildings, field)

        temp_fp = join("in_memory", "mp_fp")
        ddd.MultiPatchFootprint(in_buildings, temp_fp, "bldg_fid")

        calc_area(in_fc=temp_fp, field_name=orig_area)

        temp_isect = join("in_memory", "temp_isect")
        Intersect(r"{0} #;{1} #".format(temp_fp, in_tiles), temp_isect, "ALL",
                  None, "INPUT")

        # Delete Temporary Multipatch Footprint
        Delete(temp_fp)

        calc_area(in_fc=temp_isect, field_name=split_area)

        temp_isect_asc = join("in_memory", "temp_isect_asc")
        Sort(temp_isect, temp_isect_asc, [[building_fid, "ASCENDING"]])
        # Delete Temporary Intersect Feature Class
        Delete(temp_isect)

        fields = [building_fid, tile_fid, file_name, orig_area, split_area]

        # Generate a list of duplicates
        bldg_list = []
        with da.SearchCursor(temp_isect_asc, building_fid) as cursor2:
            for row in cursor2:
                bldg_list.append(row[0])

        duplicates = [
            item for item, count in Counter(bldg_list).items() if count > 1
        ]

        duplicates_list = []
        for i in duplicates:
            duplicates_list.append([i, bldg_list.count(i)])

        # TODO: Resolve why tile_fid is not showing up below when BuildingFID and TileFID are OID fields. "In_memory" issue
        '''
        # \\ Begin Debug print code
        from arcpy import AddMessage
        fds = [f.name for f in arcpy.ListFields(temp_isect_asc) if f.name in fields]
        AddMessage(fds)
        nfds = [f.name for f in arcpy.ListFields(temp_isect_asc) if f.name not in fields]
        AddMessage(nfds)
        # End Debug print code //
        '''
        final_list = []
        with da.SearchCursor(temp_isect_asc, fields) as cursor3:
            prev_area = -1
            prev_item_list = []
            item_count = 0
            for row in cursor3:
                if row[0] not in duplicates:
                    final_list.append([row[0], row[1], row[2]])
                else:
                    area = row[3] - row[4]
                    index = duplicates.index(row[0])
                    total_items = duplicates_list[index][1]
                    if row[0] == duplicates[
                            0] and item_count == 0:  # Deal with first item differently
                        item_count += 1
                        prev_area = area
                        prev_item_list = [row[0], row[1], row[2]]
                    elif item_count + 1 == total_items:  # Deal with last item in list
                        if prev_area <= area:
                            prev_area = area
                            prev_item_list = [row[0], row[1], row[2]]
                        final_list.append(prev_item_list)
                        item_count = 0
                        prev_area = -1
                        prev_item_list = []
                    elif item_count + 1 != total_items:
                        if prev_area <= area:
                            prev_area = area
                            prev_item_list = [row[0], row[1], row[2]]
                        item_count += 1
        # Append results back to Input Feature Class
        AddField(in_buildings, tile_fid, "LONG")
        AddField(in_buildings, file_name, "TEXT")
        with da.UpdateCursor(in_buildings,
                             [building_fid, tile_fid, file_name]) as cursor:
            for r in cursor:
                for i in final_list:
                    if r[0] == i[0]:
                        r[1] = int(i[1])
                        r[2] = str(i[2])
                cursor.updateRow(r)

        Delete(temp_isect_asc)  # temp_isect itself was already deleted above
        del bldg_list
        del duplicates_list
        del duplicates

        # Check back in 3D Analyst license
        CheckInExtension("3D")
    except LicenseError:
        AddError("3D Analyst license is unavailable")
        print("3D Analyst license is unavailable")
    except ExecuteError:
        AddError(GetMessages(2))
        print(GetMessages(2))
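The duplicate-resolution loop in this example keeps, for each building that intersects several tiles, the row whose orig_area minus split_area value is largest (ties go to the later row). A standalone, hedged restatement of that rule on plain tuples:

# Restatement of Code Example #18's duplicate-resolution rule.
# rows: iterable of (building_fid, tile_fid, file_name, orig_area, split_area).
def pick_rows(rows):
    best = {}
    for fid, tile, name, orig_area, split_area in rows:
        score = orig_area - split_area
        if fid not in best or score >= best[fid][0]:
            best[fid] = (score, tile, name)
    return {fid: (tile, name) for fid, (score, tile, name) in best.items()}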
Code Example #19
def main():
  """
  Runs the centrality tool.
  """
  env.overwriteOutput = True # Enable overwriting
  CheckOutExtension("Network")

  # Success of the program through the six steps
  success = True

  # Inputs to the tool
  if len(argv) != INPUT_COUNT + 1:
    raise Exception("Invalid number of inputs")
  input_number = index()
  next(input_number)  # Skip over sys.argv[0]
  inputs = {}
  inputs[INPUT_BUILDINGS] = argv[next(input_number)]
  inputs[POINT_LOCATION] = ("INSIDE" if argv[next(input_number)] == "true" else
      "CENTROID")
  inputs[INPUT_NETWORK] = argv[next(input_number)]
  inputs[COMPUTE_REACH] = argv[next(input_number)] == "true"
  inputs[COMPUTE_GRAVITY] = argv[next(input_number)] == "true"
  inputs[COMPUTE_BETWEENNESS] = argv[next(input_number)] == "true"
  inputs[COMPUTE_CLOSENESS] = argv[next(input_number)] == "true"
  inputs[COMPUTE_STRAIGHTNESS] = argv[next(input_number)] == "true"
  inputs[ID_ATTRIBUTE] = argv[next(input_number)]
  inputs[NODE_WEIGHT_ATTRIBUTE] = argv[next(input_number)]
  inputs[IMPEDANCE_ATTRIBUTE] = argv[next(input_number)]
  try: inputs[SEARCH_RADIUS] = float(argv[next(input_number)])
  except: inputs[SEARCH_RADIUS] = INFINITE_RADIUS
  inputs[USE_NETWORK_RADIUS] = (argv[next(input_number)] ==
      ON_THE_NETWORK_OPTION)
  try: inputs[BETA] = float(argv[next(input_number)])
  except: raise Invalid_Input_Exception("Beta")
  inputs[NORMALIZE_RESULTS] = [measure for measure in
      argv[next(input_number)].split(";") if measure != "#"]
  inputs[OUTPUT_LOCATION] = argv[next(input_number)]
  inputs[OUTPUT_FILE_NAME] = argv[next(input_number)]
  inputs[ACCUMULATOR_ATTRIBUTES] = argv[next(input_number)]

  # Record the origin nodes for centrality measurements
  # This is important if the user selects a subset of the features to be origins
  selected_features = all_values_in_column(inputs[INPUT_BUILDINGS],
    inputs[ID_ATTRIBUTE])
  # Clear selection if we got a layer file
  try:
    SelectLayerByAttribute_management(inputs[INPUT_BUILDINGS],
      "CLEAR_SELECTION")
  except:
    pass

  # Adjacency List table name
  node_locations_needed = (inputs[COMPUTE_STRAIGHTNESS] or
      not inputs[USE_NETWORK_RADIUS])
  adj_dbf_name = ("%s_%s_%s_%s_%s_%s.dbf" % (ADJACENCY_LIST_NAME,
      basename(inputs[INPUT_BUILDINGS]), basename(inputs[INPUT_NETWORK]),
      inputs[ID_ATTRIBUTE], inputs[IMPEDANCE_ATTRIBUTE],
      inputs[ACCUMULATOR_ATTRIBUTES])).replace("#", "None")
  if len(adj_dbf_name) > MAX_FILE_NAME_LENGTH:
    AddWarning(WARNING_LARGE_ADJ_FILE_NAME)
  adj_dbf = join(inputs[OUTPUT_LOCATION], adj_dbf_name)

  # Output file names
  output_feature_class_name = feature_class_name(inputs[OUTPUT_FILE_NAME])
  output_feature_class = "%s.shp" % join(inputs[OUTPUT_LOCATION],
      output_feature_class_name)
  # Create a feature class that is a copy of the input buildings
  try:
    AddMessage(INPUT_BUILDINGS_COPY_STARTED)
    CreateFeatureclass_management(out_path=inputs[OUTPUT_LOCATION],
        out_name=output_feature_class_name)
    CopyFeatures_management(in_features=inputs[INPUT_BUILDINGS],
        out_feature_class=output_feature_class)
    AddMessage(INPUT_BUILDINGS_COPY_FINISHED)
  except:
    AddWarning(GetMessages(2))
    AddMessage(INPUT_BUILDINGS_COPY_FAILED)
    success = False
  output_layer_name = layer_name(inputs[OUTPUT_FILE_NAME])
  output_layer = "%s.lyr" % join(inputs[OUTPUT_LOCATION], output_layer_name)

  # If output has already been created, don't carry on
  if Exists(output_layer):
    AddWarning(WARNING_OUTPUT_ALREADY_EXISTS)
    success = False

  # We will convert polygon input buildings to point feature class
  buildings_description = Describe(output_feature_class)
  if buildings_description.shapeType == "Point":
    # Input buildings are already a point shape file
    inputs[INPUT_POINTS] = output_feature_class
  elif buildings_description.shapeType == "Polygon":
    # Input buildings need to be converted to point feature class
    point_feature_class_name = POINT_FEATURE_CLASS_NAME(
        basename(output_feature_class), inputs[POINT_LOCATION])
    inputs[INPUT_POINTS] = "%s.shp" % join(inputs[OUTPUT_LOCATION],
        point_feature_class_name)
    # If FID is used as ID attribute, we need to change it since a point
    #     shapefile will be in use
    if inputs[ID_ATTRIBUTE] == "FID":
      inputs[ID_ATTRIBUTE] = ORIGINAL_FID
  else:
    # Input buildings need to be either points or polygons
    raise Invalid_Input_Exception("Input Buildings")

  # Find the appropriate symbology layer
  for metric_index in range(len(METRICS)):
    if inputs[COMPUTE_REACH + metric_index]:
      first_metric = METRICS[metric_index]
      break
  symbology_layer_name = get_symbology_layer_name(
      buildings_description.shapeType, first_metric)
  symbology_layer = join(SYMBOLOGY_DIR, symbology_layer_name)

  def clean_up():
    """
    Removes all auxiliary files
    """
    auxiliary_dir = join(inputs[OUTPUT_LOCATION], AUXILIARY_DIR_NAME)
    od_cost_matrix_layer = join(auxiliary_dir, OD_COST_MATRIX_LAYER_NAME)
    od_cost_matrix_lines = join(auxiliary_dir, OD_COST_MATRIX_LINES)
    temp_adj_dbf_name = "%s~.dbf" % adj_dbf_name[-4]
    temp_adj_dbf = join(inputs[OUTPUT_LOCATION], temp_adj_dbf_name)
    partial_adj_dbf = join(auxiliary_dir, PARTIAL_ADJACENCY_LIST_NAME)
    polygons = join(auxiliary_dir, POLYGONS_SHAPEFILE_NAME)
    raster = join(auxiliary_dir, RASTER_NAME)
    polygons_layer = join(auxiliary_dir, POLYGONS_LAYER_NAME)
    input_points_layer = join(auxiliary_dir, INPUT_POINTS_LAYER_NAME)
    for delete_path in [input_points_layer, polygons_layer, raster, polygons,
        partial_adj_dbf, temp_adj_dbf, od_cost_matrix_lines,
        od_cost_matrix_layer, auxiliary_dir]:
      delete(delete_path)

  try:
    """
    Here we carry out the six steps of the tool
    """
    # Step 1
    if success:
      AddMessage(STEP_1_STARTED)
      # If necessary, convert input buildings to point feature class
      if buildings_description.shapeType == "Polygon":
        AddMessage(POINT_CONVERSION_STARTED)
        to_point_feature_class(output_feature_class, inputs[INPUT_POINTS],
            inputs[POINT_LOCATION])
        AddMessage(POINT_CONVERSION_FINISHED)
      if Exists(adj_dbf):
        AddMessage(ADJACENCY_LIST_COMPUTED)
        if node_locations_needed:
          calculate_network_locations(inputs[INPUT_POINTS],
              inputs[INPUT_NETWORK])
        AddMessage(STEP_1_FINISHED)
      else:
        try:
          compute_adjacency_list(inputs[INPUT_POINTS], inputs[INPUT_NETWORK],
              inputs[ID_ATTRIBUTE], inputs[IMPEDANCE_ATTRIBUTE],
              inputs[ACCUMULATOR_ATTRIBUTES], inputs[SEARCH_RADIUS],
              inputs[OUTPUT_LOCATION], adj_dbf_name)
          AddMessage(STEP_1_FINISHED)
        except:
          AddWarning(GetMessages(2))
          AddMessage(STEP_1_FAILED)
          success = False

    # Step 2
    if success:
      AddMessage(STEP_2_STARTED)
      try:
        distance_field = trim("Total_%s" % inputs[IMPEDANCE_ATTRIBUTE])
        accumulator_fields = set([trim("Total_%s" % accumulator_attribute)
            for accumulator_attribute in inputs[ACCUMULATOR_ATTRIBUTES].split(
            ";") if accumulator_attribute != "#"])
        # Graph representation: dictionary mapping node id's to Node objects
        nodes = {}
        # The number of rows in |adj_dbf|
        directed_edge_count = int(GetCount_management(adj_dbf).getOutput(0))
        graph_progress = Progress_Bar(directed_edge_count, 1, STEP_2)
        rows = UpdateCursor(adj_dbf)
        for row in rows:
          # Get neighboring nodes, and the distance between them
          origin_id = row.getValue(trim(ORIGIN_ID_FIELD_NAME))
          destination_id = row.getValue(trim(DESTINATION_ID_FIELD_NAME))
          distance = float(row.getValue(distance_field))
          # Make sure the nodes are recorded in the graph
          for id in [origin_id, destination_id]:
            if not id in nodes:
              nodes[id] = Node()
          # Make sure that the nodes are neighbors in the graph
          if origin_id != destination_id and distance >= 0:
            accumulations = {}
            for field in accumulator_fields:
              accumulations[field] = float(row.getValue(field))
            nodes[origin_id].add_neighbor(destination_id, distance,
              accumulations)
            nodes[destination_id].add_neighbor(origin_id, distance,
              accumulations)
          graph_progress.step()
        N = len(nodes) # The number of nodes in the graph
        if N == 0:
          AddWarning(WARNING_NO_NODES)
          success = False
        AddMessage(STEP_2_FINISHED)
      except:
        AddWarning(GetMessages(2))
        AddMessage(STEP_2_FAILED)
        success = False

    # Step 3
    if success:
      AddMessage(STEP_3_STARTED)
      try:
        get_weights = inputs[NODE_WEIGHT_ATTRIBUTE] != "#"
        get_locations = node_locations_needed
        # Keep track of the number of nodes in input points not present in the graph
        point_not_in_graph_count = 0
        input_point_count = int(
            GetCount_management(inputs[INPUT_POINTS]).getOutput(0))
        node_attribute_progress = Progress_Bar(input_point_count, 1, STEP_3)
        rows = UpdateCursor(inputs[INPUT_POINTS])
        for row in rows:
          id = row.getValue(inputs[ID_ATTRIBUTE])
          if not id in nodes:
            point_not_in_graph_count += 1
            continue
          if get_weights:
            setattr(nodes[id], WEIGHT,
                row.getValue(trim(inputs[NODE_WEIGHT_ATTRIBUTE])))
          if get_locations:
            snap_x = row.getValue(trim("SnapX"))
            snap_y = row.getValue(trim("SnapY"))
            setattr(nodes[id], LOCATION, (snap_x, snap_y))
          node_attribute_progress.step()
        if point_not_in_graph_count:
          AddWarning(WARNING_POINTS_NOT_IN_GRAPH(N,
              point_not_in_graph_count))
        AddMessage(STEP_3_FINISHED)
      except:
        AddWarning(GetMessages(2))
        AddMessage(STEP_3_FAILED)
        success = False

    # Step 4
    if success:
      AddMessage(STEP_4_STARTED)
      try:
        # Compute measures
        compute_centrality(nodes, selected_features, inputs[COMPUTE_REACH],
            inputs[COMPUTE_GRAVITY], inputs[COMPUTE_BETWEENNESS],
            inputs[COMPUTE_CLOSENESS], inputs[COMPUTE_STRAIGHTNESS],
            inputs[SEARCH_RADIUS], inputs[USE_NETWORK_RADIUS], inputs[BETA],
            inputs[NORMALIZE_RESULTS], accumulator_fields)
        AddMessage(STEP_4_FINISHED)
      except:
        AddWarning(GetMessages(2))
        AddMessage(STEP_4_FAILED)
        success = False

    # Step 5
    if success:
      AddMessage(STEP_5_STARTED)
      try:
        # Make output layer
        MakeFeatureLayer_management(in_features=output_feature_class,
            out_layer=output_layer_name)
        # Save output layer
        SaveToLayerFile_management(output_layer_name, output_layer,
            "ABSOLUTE")
        # Use a test node to figure out which metrics were computed
        test_node_id = selected_features.pop()
        # Make sure the test node is in the graph
        while test_node_id not in nodes:
          test_node_id = selected_features.pop()
        test_node = nodes[test_node_id]
        measures = set([measure for measure in dir(test_node) if (measure in
            FINAL_ATTRIBUTES or is_accumulator_field(measure))])
        # Add a field in the output layer for each computed metric
        for measure in measures:
          AddField_management(in_table=output_layer, field_name=trim(measure),
              field_type="DOUBLE", field_is_nullable="NON_NULLABLE")
        # Figure out the id field to use based on the type of input buildings
        if (buildings_description.shapeType == "Polygon" and
            inputs[ID_ATTRIBUTE] == ORIGINAL_FID):
          id_field = "FID"
        else:
          id_field = inputs[ID_ATTRIBUTE]
        # Fill the layer with the metric values
        write_progress = Progress_Bar(N, 1, STEP_5)
        layer_rows = UpdateCursor(output_layer)
        for row in layer_rows:
          id = row.getValue(id_field)
          for measure in measures:
            # If no value was computed for this node id, set value to 0
            value = 0
            if id in nodes and hasattr(nodes[id], measure):
              value = getattr(nodes[id], measure)
            row.setValue(trim(measure), value)
          layer_rows.updateRow(row)
          write_progress.step()
        # Save to toolbox output
        SetParameterAsText(OUTPUT_FEATURE_CLASS, output_feature_class)
        AddMessage(STEP_5_FINISHED)
      except:
        AddWarning(GetMessages(2))
        AddMessage(STEP_5_FAILED)
        success = False

    # Step 6
    if success:
      AddMessage(STEP_6_STARTED)
      # Apply symbology
      try:
        ApplySymbologyFromLayer_management(in_layer=output_layer,
            in_symbology_layer=symbology_layer)
      except:
        AddWarning(WARNING_APPLY_SYMBOLOGY_FAILED)
        AddWarning(GetMessages(2))
        AddMessage(STEP_6_FAILED)
      # Display
      try:
        current_map_document = mapping.MapDocument("CURRENT")
        data_frame = mapping.ListDataFrames(current_map_document,
            "Layers")[0]
        add_layer = mapping.Layer(output_layer)
        mapping.AddLayer(data_frame, add_layer, "AUTO_ARRANGE")
        AddMessage(STEP_6_FINISHED)
      except:
        AddWarning(WARNING_FAIL_TO_DISPLAY)
        AddWarning(GetMessages(2))
        AddMessage(STEP_6_FAILED)

    # Clean up
    clean_up()

    AddMessage(SUCCESS if success else FAILURE)

  except ExecuteAbort:
    clean_up()
Code Example #20
""""----------------------------------------------------------------------------
Name:         coreconcepts.py
Purpose:      coreconcepts library
Project:      language for spatial computing
Author:       Kuhn et al. 2018, adapted by Selina Studer
License:      Apache License 2.0
Created:      26.12.2018
Libraries:    arcpy
-------------------------------------------------------------------------------"""

from arcpy import CheckOutExtension, env

env.overwriteOutput = True

# Check out any necessary licenses
CheckOutExtension("spatial")


class CcField(object):
    """
    Abstract class for core concept 'field'
    """
    def __init__(self, filepath, objIndex, domain):
        """
        :param filepath: data file path
        :param objIndex: unique ID
        :param domain: desc.extent of the geo_object
        """
        self.filepath = filepath
        self.sObj = objIndex
        self.domain = domain
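CcField only stores its inputs; a hedged sketch of a concrete subclass (the subclass and its method are hypothetical, not part of the coreconcepts library):

# Hypothetical concrete field backed by a raster dataset.
class RasterField(CcField):
    """A 'field' whose values come from a raster."""
    def value_at(self, x, y):
        # A real implementation might sample the raster at (x, y), e.g. via
        # arcpy.management.GetCellValue(self.filepath, "{} {}".format(x, y)).
        raise NotImplementedError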
Code Example #21
from Constants import WEIGHT
from Node import Node
from os.path import join
from sys import argv
from Utils import all_values_in_column
from Utils import basename
from Utils import calculate_network_locations
from Utils import delete
from Utils import Invalid_Input_Exception
from Utils import is_accumulator_field
from Utils import Progress_Bar
from Utils import to_point_feature_class
from Utils import trim
# Assumed additional imports (not shown in the original snippet): env and
# CheckOutExtension come from arcpy; INPUT_COUNT and the INPUT_*/COMPUTE_*
# keys presumably come from the same Constants module as WEIGHT; `index` (an
# argument-counter generator) is also assumed to be defined elsewhere.
from arcpy import env, CheckOutExtension

env.overwriteOutput = True # Enable overwriting
CheckOutExtension("Network")

# Success of the program through the six steps
success = True

# Inputs to the tool
if len(argv) != INPUT_COUNT + 1:
  raise Exception("Invalid number of inputs")
input_number = index()
next(input_number)  # Skip over sys.argv[0]
inputs = {}
inputs[INPUT_BUILDINGS] = argv[next(input_number)]
inputs[POINT_LOCATION] = ("INSIDE" if argv[next(input_number)] == "true" else
    "CENTROID")
inputs[INPUT_NETWORK] = argv[next(input_number)]
inputs[COMPUTE_REACH] = argv[next(input_number)] == "true"
Code Example #22
def main():
    from arcpy import CheckExtension, CheckOutExtension, CheckInExtension, ExecuteError, GetMessages, AddError,\
        ListDatasets, env, SetProgressor, SetProgressorLabel, SetProgressorPosition, ResetProgressor, Exists
    from arcpy.management import CreateFileGDB, CreateMosaicDataset, AddRastersToMosaicDataset
    from arcpy import Describe
    from os.path import join, exists
    from os import mkdir, makedirs, path  # path is used below (path.dirname, path.exists)

    class LicenseError(Exception):
        pass

    try:
        if CheckExtension("ImageAnalyst") == "Available":
            CheckOutExtension("ImageAnalyst")
        else:
            # raise a custom exception
            raise LicenseError
        try:
            from PIL import Image
        except ModuleNotFoundError:
            AddError(
                "PILLOW Library Not Detected. Install using Python Manager in ArcGIS Pro"
            )
            print(
                "PILLOW Library Not Detected. Install using Python Manager in ArcGIS Pro"
            )
            exit()

        env.workspace = in_mosaic_gdb
        mosaics = ListDatasets("*", "Mosaic")
        file_count = len(mosaics)
        count = 0
        SetProgressor("step", "Begin Processing Files...", 0, file_count, 1)
        if not exists(out_folder):
            makedirs(out_folder)
        fileGDB = join(out_folder, "ortho_mosaics.gdb")
        if not Exists(fileGDB):
            CreateFileGDB(out_folder, "ortho_mosaics.gdb")
        for mosaic in mosaics:
            print("processing mosaic {0} of {1}".format(count + 1, file_count))
            in_mosaic = join(in_mosaic_gdb, mosaic)
            i_list, extent = get_images_and_stats(
                in_mosaic
            )  # Obtain image statistics and info from mosaic for processing
            for i in i_list:  # Check that output folder is not the path of i
                if out_folder == path.dirname(i[0]):
                    AddError(
                        "outFolder cannot be the same folder/directory as images referenced in the mosaic dataset"
                    )
                    exit()
            if not path.exists(out_folder):
                makedirs(out_folder)
            out_tile_folder = join(out_folder, "tiles{}".format(count))
            mkdir(out_tile_folder)
            SetProgressorLabel("Texturing Mosaic {0}...".format(count))
            texture_images(i_list, extent, in_texture, in_polygon,
                           out_tile_folder, method,
                           blur_distance)  # Generate Texture-Masked tiles

            mosaic_name = "tiles{}_".format(count)
            mosaic_dataset = join(fileGDB, mosaic_name)
            SetProgressorLabel(
                "Creating Mosaic Dataset for Tiles of {0}...".format(mosaic))
            sr = Describe(in_mosaic).spatialReference
            CreateMosaicDataset(fileGDB, mosaic_name, sr, num_bands,
                                pixel_depth, product_definition,
                                product_band_definitions)
            SetProgressorLabel(
                "Adding of {0} to Mosaic Dataset...".format(mosaic))
            AddRastersToMosaicDataset(mosaic_dataset, "Raster Dataset",
                                      out_tile_folder, "UPDATE_CELL_SIZES",
                                      "UPDATE_BOUNDARY", "NO_OVERVIEWS", None,
                                      0, 1500, None, '', "SUBFOLDERS",
                                      "ALLOW_DUPLICATES", "NO_PYRAMIDS",
                                      "NO_STATISTICS", "NO_THUMBNAILS", '',
                                      "NO_FORCE_SPATIAL_REFERENCE",
                                      "NO_STATISTICS", None, "NO_PIXEL_CACHE")
            SetProgressorPosition()
            count += 1
        ResetProgressor()
        CheckInExtension("ImageAnalyst")
    except LicenseError:
        AddError("Image Analyst license is unavailable")
        print("Image Analyst license is unavailable")
    except ExecuteError:
        print(GetMessages(2))
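Example #22 also drives the geoprocessing progressor. A minimal standalone sketch of that lifecycle, using only the documented arcpy progressor calls:

# Minimal step-progressor lifecycle, as used in Code Example #22.
from arcpy import (SetProgressor, SetProgressorLabel, SetProgressorPosition,
                   ResetProgressor)

items = ["a", "b", "c"]                          # placeholder work items
SetProgressor("step", "Begin Processing...", 0, len(items), 1)
for item in items:
    SetProgressorLabel("Processing {}...".format(item))
    # ... per-item work goes here ...
    SetProgressorPosition()                      # advance the bar one step
ResetProgressor()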