Example 1
def toOpen(featureClass, outJSON, includeGeometry="geojson"):
    #check the file type based on the extension
    fileType=getExt(outJSON)
    #some sanity checking
    #valid geojson needs features, seriously you'll get an error
    if not int(GetCount_management(featureClass).getOutput(0)):
        AddMessage("No features found, skipping")
        return
    elif not fileType:
        AddMessage("this filetype doesn't make sense")
        return
    #geojson needs geometry
    if fileType in ("geojson", "topojson"):
        includeGeometry="geojson"
    elif fileType=="sqlite":
        includeGeometry="well known binary"
    else:
        includeGeometry=includeGeometry.lower()
    #open up the file
    outFile=prepareFile(outJSON,featureClass,fileType,includeGeometry)
    #outFile will be false if the format isn't defined
    if not outFile:
        AddMessage("I don't understand the format")
        return
    #write the rows
    writeFile(outFile,featureClass,fileType,includeGeometry)
    #go home
    closeUp(outFile,fileType)
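
getExt, prepareFile, writeFile, and closeUp are helpers from the same module and are not shown here. As a hedged sketch only, getExt likely just normalizes the output file's extension, along these lines:

from os.path import splitext

def getExt(path):
    # '.GeoJSON' -> 'geojson'; returns '' when the path has no extension,
    # which toOpen then reports as an unusable file type
    return splitext(path)[1].lstrip('.').lower()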
Example 2
    def csv(self):
        shapefile_type = self.desc.shapeType

        try:
            if shapefile_type in ['Point', 'MultiPoint']:
                with open(self.full_path + '.csv', 'wb') as f:
                    writer = csv.writer(f)
                    try:
                        self.fields.remove('Shape')
                        self.fields.remove('FID')
                    except ValueError:  # 'Shape'/'FID' may not be present
                        pass
                    headers = copy.deepcopy(self.fields)
                    self.fields.append('SHAPE@XY')
                    headers.extend(['LAT', 'LNG'])
                    writer.writerow(headers)
                    with arcpy.da.SearchCursor(self.shapefile,
                                               self.fields) as cur:
                        for row in cur:
                            lon, lat = row[-1]
                            coords = (lat, lon)
                            row = row[0:-1] + coords
                            writer.writerow(row)
                    return True

            else:
                AddMessage('Sorry, converting layers of geometry type ' +
                           shapefile_type + ' is not supported.')
                return False

        except Exception as err:
            AddMessage('Unable to export CSV file: ' + str(err))
            return False
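
Note that the file is opened in 'wb' mode, which pairs with csv.writer only on Python 2. If this method were ported to Python 3, the open call would need text mode with newline='' (a standard csv-module requirement). A self-contained sketch of that variant, with a placeholder filename:

import csv

# Python 3: text mode with newline='' so csv.writer controls line endings
with open('points.csv', 'w', newline='') as f:
    writer = csv.writer(f)
    writer.writerow(['LAT', 'LNG'])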
Example 3
def create_views(database_path):
    database = database_path

    # create database connection
    conn = create_connection(database)
    
    # Create desktop results view
    AddMessage('creating desktop results view')
    view_desktop_results_sql = os.path.join(sql_dir, 'view_desktop_results.sql')
    create_view(conn, view_desktop_results_sql)
    
    # Create transect review view
    AddMessage('creating transects review')
    view_transect_sql = os.path.join(sql_dir, 'view_transect_data.sql')
    create_view(conn, view_transect_sql)
    
    # Create site scale values view
    view_map_unit_sql = os.path.join(sql_dir, 'view_site_scale_values.sql')
    create_view(conn, view_map_unit_sql)
    
    # Create reserve account view
    view_reserve_account_sql = os.path.join(sql_dir, 'view_reserve_account.sql')
    create_view(conn, view_reserve_account_sql)
    
    # Create baseline view
    view_baseline_sql = os.path.join(sql_dir, 'view_baseline.sql')
    create_view(conn, view_baseline_sql)
    
    conn.close()
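
create_connection and create_view (and the sql_dir global) come from elsewhere in this module. A minimal sketch of what they might look like on top of the standard sqlite3 library, assuming each .sql file holds a single CREATE VIEW statement:

import sqlite3

def create_connection(db_file):
    # return an open connection, or None if the database cannot be opened
    try:
        return sqlite3.connect(db_file)
    except sqlite3.Error as e:
        AddMessage('Could not connect to {}: {}'.format(db_file, e))
        return None

def create_view(conn, sql_path):
    # read the CREATE VIEW statement from disk and execute it
    with open(sql_path, 'r') as sql_file:
        conn.executescript(sql_file.read())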
Example 4

def createMosaics(gdb, mosaicName, folder, spatialRef, pixel_type):
    # Create mosaic dataset
    arcpy.CreateMosaicDataset_management(gdb, mosaicName, spatialRef, None, pixel_type, "CUSTOM", None)
    mosaicDS = join(gdb, mosaicName)
    AddMessage('Mosaic dataset {} created...'.format(mosaicName))

    # Add rasters to mosaic and set cell size
    AddMessage('Adding rasters to mosaic dataset...')
    AddRastersToMosaicDataset(mosaicDS, "Raster Dataset", folder, "UPDATE_CELL_SIZES",
                                               "UPDATE_BOUNDARY", "NO_OVERVIEWS", None, 0, 1500, None, '',
                                               "SUBFOLDERS", "ALLOW_DUPLICATES", "BUILD_PYRAMIDS",
                                               "CALCULATE_STATISTICS", "NO_THUMBNAILS", '',
                                               "NO_FORCE_SPATIAL_REFERENCE", "ESTIMATE_STATISTICS", None,
                                               "NO_PIXEL_CACHE")
    # Update mosaic cell size
    arcpy.AddMessage('Updating mosaic cell size...')
    cellSize = arcpy.GetRasterProperties_management(mosaicDS, "CELLSIZEX")
    newSize = float(cellSize.getOutput(0)) / 2
    arcpy.SetMosaicDatasetProperties_management(mosaicDS, cell_size=newSize)

    # Add results to the display
    arcpy.AddMessage('Adding results to map views...')
    aprx = arcpy.mp.ArcGISProject("CURRENT")
    for m in aprx.listMaps():
        if m.mapType == "MAP":
            m.addDataFromPath(mosaicDS)
Example 5
def import_policy_tables(database_path, policy_tables_path):    
    database = database_path
    policy_tables_folder = policy_tables_path
    
    # create database connection
    conn = create_connection(database)

    # read in tables
    start = time.time()
    AddMessage('reading policy tables at ' + policy_tables_folder)
    for table in os.listdir(policy_tables_folder):
        # read in table
        table_path = os.path.join(policy_tables_folder, table)
        df = pd.read_csv(table_path)
        
        # read in sql file
        sql_file_name = 'insert_' + table[:-4].replace('-', '_') + '.sql'
        sql_file_path = os.path.join(sql_dir, sql_file_name)
        
        # insert data into database
        with conn:
            AddMessage('inserting ' + table[:-4].replace('-', '_'))
            for _, row in df.iterrows():
                data = tuple(row)
                insert_data(conn, sql_file_path, data)
    
    conn.close()
        
    AddMessage('{} seconds elapsed'.format(round((time.time() - start), 2)))
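
insert_data is another helper that is not shown. Assuming each .sql file is a parameterized INSERT statement with one placeholder per column, it could be as small as:

def insert_data(conn, sql_path, data):
    # execute a parameterized INSERT; commits are handled by the caller's
    # "with conn:" block
    with open(sql_path, 'r') as sql_file:
        sql = sql_file.read()
    conn.execute(sql, data)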
Example 6

    def execute(self, parameters, messages):

        from_db = parameters[reproject_from_db].valueAsText
        to_db = parameters[reproject_to_db].valueAsText
        projection = parameters[reproject_projection].valueAsText
        skip_empty = parameters[reproject_skip_empty].valueAsText

        AddMessage('Tool received parameters: {}'.format(', '.join(
            [p.valueAsText for p in parameters])))

        from arcpy import env, Exists

        # skipEmpty is a custom flag on env, read later by process_feature_classes
        env.skipEmpty = skip_empty == 'true'

        #run the functions
        if not Exists(projection):
            AddMessage('Projection file {} does not exist'.format(projection))
            return

        # just set the output coordinate system and outputs
        # will be projected :)
        env.skipAttach = True
        env.outputCoordinateSystem = projection

        #call the create datasets function passing the foreach layer function to it
        Geodatabase.process_datasets(from_db, to_db, None, None, None)
Example 7
def calculatePointElevationField(points, raster, field_name):

    #monitor progress by counting features
    view = MakeTableView_management(points, 'points')
    count = int(GetCount_management('points').getOutput(0))
    SetProgressor('step', 'Extracting point elevations', 0, count)
    AddMessage('{} features to process'.format(count))

    # Get the object id field
    oid = Describe(points).OIDFieldName

    # make an update cursor and update each row's elevation field
    cursor = UpdateCursor(points, [field_name, 'SHAPE@', oid])

    for row in cursor:
        row[0] = getElevationAtPoint(raster, row[1])
        cursor.updateRow(row)
        AddMessage('row updated to {}; oid: {}'.format(row[0], row[2]))
        SetProgressorPosition()

    # release the data
    del cursor

    #reset this progressor
    ResetProgressor()
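
getElevationAtPoint is defined elsewhere. One plausible (hypothetical) implementation samples the raster with GetCellValue_management at the point's coordinates:

from arcpy import GetCellValue_management

def getElevationAtPoint(raster, point_geometry):
    # sample the raster at the point location; returns None over NoData cells
    pnt = point_geometry.centroid
    value = GetCellValue_management(raster, '{} {}'.format(pnt.X, pnt.Y)).getOutput(0)
    return float(value) if value != 'NoData' else None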
Example 8

def buildNetwork(NETWORK_FILE_PATH):
    """
    DESCRIPTION:
        Builds a csNetwork from the edge (line) feature class of an ND file.
    PARAMETERS:
        NETWORK_FILE_PATH : String - path to the ND file.
    RETURN:
        csNetwork
    """
    DEBUG = False
    tStart = time()
    network = csNetwork()
    featureClassPath    = getEdgePathFromNetwork( NETWORK_FILE_PATH )
    rows                = SearchCursor(featureClassPath,["SHAPE@","OID@" ])
    if DEBUG:
        AddMessage("Delta: " +  str( time() -  tStart ))
    for row in rows:
        featShape   = row[0]
        pFirst      = featShape.firstPoint
        pLast       = featShape.lastPoint
        l           = featShape.length3D
        points = polyline_points(featShape)
        network.addConnections(arcGISPointAsTuple(pFirst), arcGISPointAsTuple(
            pLast), points, l, str(row[1]))
    if DEBUG:
        tEnd = time()
        AddMessage("Delta: " +  str( tEnd -  tStart ))
    network.remap()
    if DEBUG:
        tEnd = time()
        AddMessage("Delta: " +  str( tEnd -  tStart ))
    return network
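
csNetwork, getEdgePathFromNetwork, polyline_points, and arcGISPointAsTuple all belong to the surrounding module. Purely as an illustration, arcGISPointAsTuple probably just converts an arcpy Point into a hashable key:

def arcGISPointAsTuple(point):
    # (x, y, z) tuples can serve as dictionary keys in the network graph
    return (point.X, point.Y, point.Z)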
Example 9
def find_redundancy_index(network, points, edge_to_points, coeff, origin_id,
                          destination_id, search_radius, weights_available):
    """
  Returns the redundancy index and unique segments for the given pair of points
      |origin_id| and |destination_id|. |network| is the csNetwork in which the
      points reside. |points| is a mapping from point ids to csPoint objects.
      |edge_to_points| is a mapping from edge ids to lists of the csPoints that
      reside on the respective edge. |coeff| is the redundancy coefficient,
      assumed to be at least 1. |weights_available| should be True if the points
      have weights so that the redundancy index can be computed appropriately.
      Returns None if the shortest path between the two points is larger than
      |search_radius| or there is no network path between the two points.
  """
    # print current OD pair
    AddMessage("O=%s D=%s" % (origin_id, destination_id))
    # add origin and destination pseudo nodes to network
    o_point = points[origin_id]
    network.addPseudoNode(o_point.tValue, o_point.Segment, "O", o_point.Point)
    d_point = points[destination_id]
    network.addPseudoNode(d_point.tValue, d_point.Segment, "D", d_point.Point)
    # find shortest path distance between origin and destination
    search_result = find_shortest_path(network, "O", "D")
    if search_result is None:
        AddMessage("No path found")
        network.clearPsudoNodes()
        return None
    shortest_path, shortest_path_dist = search_result
    if shortest_path_dist > search_radius:
        AddMessage(
            "Shortest path distance <%s> larger than search radius <%s>" %
            (shortest_path_dist, search_radius))
        network.clearPsudoNodes()
        return None
    # compute unique segments
    unique_segments = _redundant_unique_segments(network,
                                                 shortest_path_dist * coeff)
    # compute redundancy
    # TODO(mikemeko, raul_kalvo): think of better ideas for what to do when
    #     redundancy index denominator is 0
    if weights_available:
        shortest_path_weight_sum = sum(
            edge_building_weight_sum(network, edge_to_points, edge_id)
            for edge_id in shortest_path)
        unique_segments_weight_sum = sum(
            edge_building_weight_sum(network, edge_to_points, edge_id)
            for edge_id in unique_segments)
        redundancy = (unique_segments_weight_sum / shortest_path_weight_sum
                      if shortest_path_weight_sum > 0 else 1)
    else:
        unique_segments_total_dist = sum(network.Edges[edge_id].Length
                                         for edge_id in unique_segments)
        redundancy = (unique_segments_total_dist /
                      shortest_path_dist if shortest_path_dist > 0 else 1)
    # compute unique network segments
    unique_network_segments = set(map(network.originalEdge, unique_segments))
    # result
    AddMessage("Redundancy=%.5f" % (redundancy))
    network.clearPsudoNodes()
    return redundancy, unique_network_segments
Example 10

def main(project_path='traffic-map.aprx',
         service_name="Traveler_Info",
         folder_name="Traveler_Info"):
    """Publishes a project map to a service
    """
    project_path = abspath(project_path)
    if not exists(project_path):
        raise FileNotFoundError("File not found: %s" % project_path)
    # Open the project
    AddMessage("Opening %s" % project_path)
    aprx = ArcGISProject(project_path)
    # Get the first map
    the_map = aprx.listMaps()[0]
    the_layers = the_map.listLayers()

    # Create the output path string by replacing the file extension.
    draft_path = re.sub(r"\.aprx$", ".sddraft", project_path)
    if exists(draft_path):
        AddMessage("Deleting preexisting file: %s" % draft_path)
        os.remove(draft_path)
    AddMessage("Creating %s from %s..." % (project_path, draft_path))
    # Create the web layer SDDraft file.
    try:
        # ArcGIS Pro < 2.0: Fails here with a RuntimeError that has no message
        # if ArcGIS Pro is not open and signed in to ArcGIS Online.
        CreateWebLayerSDDraft(the_layers,
                              draft_path,
                              service_name,
                              "MY_HOSTED_SERVICES",
                              "FEATURE_ACCESS",
                              folder_name=folder_name,
                              copy_data_to_server=True,
                              summary="Test service",
                              tags="test,traffic,traveler",
                              description="Test Service",
                              use_limitations="For testing only")
    except RuntimeError as ex:
        if ex.args:
            AddError("Error creating %s. %s" % (draft_path, ex.args))
        else:
            AddError("Error creating %s. No further info provided." %
                     draft_path)
    else:
        sd_path = re.sub(r"draft$", "", draft_path)
        if exists(sd_path):
            AddMessage("Deleting preexisting file: %s" % sd_path)
            os.remove(sd_path)
        service_definition = arcpy.server.StageService(draft_path)
        arcpy.server.UploadServiceDefinition(service_definition,
                                             "My Hosted Services")
Example 11
def calculate_network_locations(points, network):
    """
  Computes the locations of |points| in |network|
  |points|: a feature class (points or polygons)
  |network|: a network dataset
  """
    AddMessage(CALCULATE_LOCATIONS_STARTED)
    CalculateLocations_na(in_point_features=points,
                          in_network_dataset=network,
                          search_tolerance=SEARCH_TOLERANCE,
                          search_criteria=("%s SHAPE; %s SHAPE;" %
                                           network_features(network)),
                          exclude_restricted_elements="INCLUDE")
    AddMessage(CALCULATE_LOCATIONS_FINISHED)
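
SEARCH_TOLERANCE, the *_STARTED/*_FINISHED messages, and network_features are module-level names that are not shown. Judging by the search_criteria string, network_features presumably returns the edge and junction source names of the dataset; a sketch under that assumption:

from arcpy import Describe

def network_features(network):
    # return (edge_source_name, junction_source_name) for the network dataset
    sources = Describe(network).sources
    edges = [s.name for s in sources if s.sourceType == 'EdgeFeature']
    junctions = [s.name for s in sources if s.sourceType == 'JunctionFeature']
    return edges[0], junctions[0]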
Example 12

def RoadNameFixCaller(roadsFeaturesPath, accidentDataTablePath):
    print("RoadNameFixCaller received the following arguments:")
    print("roadFeaturesPath: " + roadsFeaturesPath)
    print("accidentDataTablePath: " + accidentDataTablePath)

    optionsInstance = optionsHolder(
    )  # Creates an instance of the empty class.

    ## defaults
    optionsInstance.roadsFeaturesLocation = r'\\gisdata\ArcGIS\GISdata\DASC\NG911\Final\dt_testing\Region1_BU_Final_RoadChecks.gdb\NG911\RoadCenterline'
    optionsInstance.accidentDataTable = r'\\gisdata\ArcGIS\GISdata\DASC\NG911\Final\dt_testing\AccGeoToDasc.gdb\ButlerCoSend'
    optionsInstance.useKDOTFields = False
    ## GDB Location should be derived from accidentDataWithOffsetOutput location.

    optionsInstance = UpdateOptionsWithParameters(optionsInstance)

    optionsInstance.roadsFeaturesLocation = roadsFeaturesPath
    optionsInstance.accidentDataTable = accidentDataTablePath
    # Derive the GDB path from the roads features actually passed in,
    # not from the defaults above.
    optionsInstance.gdbPath = getGDBLocationFromFC(
        optionsInstance.roadsFeaturesLocation)

    CreateUniqueRoadNameTable(optionsInstance)
    RoadNameRepair(optionsInstance)
    # Change optionsInstance to use the KDOT fields
    # and then run the RoadNameRepair function again.
    optionsInstance.useKDOTFields = True
    RoadNameRepair(optionsInstance)
    AddMessage("Road Name Fixes complete.")
Example 13
def process_feature_classes(input_ws, output_ws, foreach_layer=None):
    """
    processes each featureclass with an optional function
    input_ws - the database or dataset path to process feature classes
    output_ws - the output for the feature classes
    foreach_layer - the function to process the feature classes
    """
    from arcpy import env, ListFeatureClasses, AddWarning, AddMessage, \
        GetCount_management, FeatureClassToFeatureClass_conversion
    from os.path import join
    env.workspace = input_ws
    feature_classes = ListFeatureClasses()
    for feature_class in feature_classes:
        
        AddMessage('Processing {}...'.format(feature_class))
        if env.skipEmpty:
            count = int(GetCount_management(feature_class)[0])
            if count == 0:
                AddWarning('Skipping because table is empty: {}'.format(feature_class))
                continue
        try:
            if foreach_layer:
                foreach_layer(input_ws, output_ws, feature_class)
            else:
                #copy each feature class over
                output_path = join(output_ws, get_name(feature_class))
                delete_existing(output_path)
                FeatureClassToFeatureClass_conversion(feature_class, output_ws, get_name(feature_class))
        except Exception as e:
            AddWarning('Error processing feature class {} - {}'.format(feature_class, e))
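
get_name and delete_existing are small helpers from the same module. Plausible versions, assuming get_name strips an owner/schema prefix from enterprise-geodatabase names:

from arcpy import Exists, Delete_management

def get_name(feature_class):
    # 'DBO.Schema.Roads' -> 'Roads'
    return feature_class.split('.')[-1]

def delete_existing(path):
    # remove any previous copy so the conversion does not fail on a name clash
    if Exists(path):
        Delete_management(path)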
Example 14
    def dump(self, f):
        self.start()
        #print('writing')
        f.write('{"type":"Topology","bbox":')
        dump([self.bounds.x0, self.bounds.y0, self.bounds.x1, self.bounds.y1],
             f)
        f.write(',"transform":')
        dump(
            {
                'scale': [1.0 / self.kx, 1.0 / self.ky],
                'translate': [self.bounds.x0, self.bounds.y0]
            }, f)
        #print('dumping objects')
        f.write(',"objects":')
        i = 0
        AddMessage('one last time')
        for thing in self.get_objects():
            i += 1
            #AddMessage('on ' + str(i) + ' for the last time')
            f.write(thing)
        #print('dumping arcs')
        f.write(',"arcs":[')
        first = True
        for arc in self.ln.get_arcs():
            if first:
                first = False
            else:
                f.write(',')
            dump(arc, f)
        f.write(']}')
Example 15
def findTheMostInterestingRow(listOfRows, testDirection, maxAngleDiff):
    currentUpdateList = [-1, -1, -1, "False"]
    # What about 'U' direction or null direction?
    # Make some logic to deal with those.
    # Technically, the else statement does that.
    # Should it do anything other than printing a line
    # that says that the direction for the row
    # is invalid?
    
    # Refactored: Test with unit tests to make sure this
    # still works properly.
    # 2015-02-20: All tests passed.
    # 2015-02-25: Field order changed. Reorder unit test data to match.
    # 2015-02-25: Function return type changed. Modify unit test data to match.
    # 2015-02-26: All tests passed.
    if testDirection == "N":
        # Logic related to "N"
        targetAngle = 90
        
        currentUpdateList = createUpdateList(listOfRows, targetAngle, maxAngleDiff)
            
    elif testDirection == "E":
        targetAngle = 0
        
        currentUpdateList = createUpdateList(listOfRows, targetAngle, maxAngleDiff)
            
    elif testDirection == "S":
        targetAngle = 270
        
        currentUpdateList = createUpdateList(listOfRows, targetAngle, maxAngleDiff)
            
    elif testDirection == "W":
        targetAngle = 180
        
        currentUpdateList = createUpdateList(listOfRows, targetAngle, maxAngleDiff)
            
    elif testDirection == "NE":
        targetAngle = 45
        
        currentUpdateList = createUpdateList(listOfRows, targetAngle, maxAngleDiff)
            
    elif testDirection == "SE":
        targetAngle = 315
        
        currentUpdateList = createUpdateList(listOfRows, targetAngle, maxAngleDiff)
            
    elif testDirection == "SW":
        targetAngle = 225
        
        currentUpdateList = createUpdateList(listOfRows, targetAngle, maxAngleDiff)
            
    elif testDirection == "NW":
        targetAngle = 135
        
        currentUpdateList = createUpdateList(listOfRows, targetAngle, maxAngleDiff)
        
    else:
        AddMessage("Invalid direction given for the At Road: " + testDirection)
        
    return currentUpdateList # Change to return an update list.
Example 16
def printmes(x):
    try:
        from arcpy import AddMessage
        AddMessage(x)
        print(x)
    except ImportError:  # ModuleNotFoundError only exists on Python 3.6+
        print(x)
Example 17
def tweet(msg):
    """Print a message for both arcpy and python.
    : msg - a text message
    """
    m = "\n{}\n".format(msg)
    AddMessage(m)
    print(m)
Example 18

def process_feature_classes(input_ws, output_ws, foreach_layer=None):
    """
    processes each featureclass with an optional function
    input_ws - the database or dataset path to process feature classes
    output_ws - the output for the feature classes
    foreach_layer - the function to process the feature classes
    """
    from arcpy import env, ListFeatureClasses, FeatureClassToGeodatabase_conversion, AddWarning, AddMessage
    from os.path import join
    env.workspace = input_ws
    feature_classes = ListFeatureClasses()
    for feature_class in feature_classes:

        AddMessage('Processing {}...'.format(feature_class))
        try:
            if foreach_layer:
                foreach_layer(input_ws, output_ws, feature_class)
            else:
                #copy each feature class over
                output_path = join(output_ws, get_name(feature_class))
                delete_existing(output_path)
                FeatureClassToGeodatabase_conversion(feature_class, output_ws)
        except Exception as e:
            AddWarning('Error processing feature class {} - {}'.format(
                feature_class, e))
Example 19
    def __init__(self, featureClass):
        self.featureCount = int(GetCount_management(featureClass).getOutput(0))
        SetProgressor("step",
                      "Found {0} features".format(str(self.featureCount)), 0,
                      100, 1)
        AddMessage("Found " + str(self.featureCount) + " features")
        self.percent = 0
        self.current = 0
Example 20

def pandas_operation_here(lidar_points):
    AddMessage('detected {} points in pandas dataframe'.format(
        lidar_points.size))
    print("first 10 rows in pandas dataframe")
    print(lidar_points[:10])
    print("Data Type: {}".format(type(lidar_points)))
    print("Array Shape: {}".format(lidar_points.shape))
    print("Array Number of Dimensions: {}".format(lidar_points.ndim))
Example 21

def numpy_operation_here(lidar_points):
    AddMessage('detected {} points in numpy array'.format(lidar_points.size))
    print("first 10 rows in numpy array")
    print(lidar_points[:10])
    print("Data Type: {}".format(type(lidar_points)))
    print("Array Shape: {}".format(lidar_points.shape))
    print("Array Number of Dimensions: {}".format(lidar_points.ndim))
Example 22

    def printAdjacencyMatrix(self, printOut=True):
        """
        0    Origin
        1    Destination
        2    Length
        3    EDGE ID
        """
        outputTable = []
        for k in self.E:
            e = self.E[k]
            s1 = "%s \t %s \t %s \t %s" % (e.Start, e.End, e.Length, k)
            s2 = "%s \t %s \t %s \t %s" % (e.End, e.Start, e.Length, k)
            AddMessage(s1)
            AddMessage(s2)
            outputTable.append(s1)
            outputTable.append(s2)
        return outputTable
Example 23
    def process(self, input_fc, output_fc, use_template='false'):

        template = None
        if use_template == 'true':
            template = input_fc

        #get the directory, filename, and spatial reference
        sp_ref = Describe(input_fc).spatialReference
        directory, filename = Split_Path(output_fc)

        #create a new feature class
        AddMessage('Creating feature class {}'.format(output_fc))
        CreateFeatureclass_management(directory, filename, 'POINT', template,
                                      'DISABLED', 'DISABLED', sp_ref)

        #copy the geometry centroid
        AddMessage('Extracting endpoints...')
        line_to_endpoints(input_fc, output_fc)
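
Split_Path and line_to_endpoints are defined elsewhere; Split_Path is presumably a thin wrapper around os.path.split. A rough, hypothetical sketch of line_to_endpoints that writes both endpoints of every line:

from arcpy import da

def line_to_endpoints(input_fc, output_fc):
    # copy the first and last vertex of each line into the point feature class
    with da.SearchCursor(input_fc, ['SHAPE@']) as search, \
            da.InsertCursor(output_fc, ['SHAPE@XY']) as insert:
        for (shape,) in search:
            insert.insertRow([(shape.firstPoint.X, shape.firstPoint.Y)])
            insert.insertRow([(shape.lastPoint.X, shape.lastPoint.Y)])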
Example 24
def extract_attachments(att_table, out_folder, group_by_field=None):
    # [<Field>, ...]
    l_fields = ListFields(att_table)

    # [dbo.schema.fieldname, ...]
    field_names = [f.name for f in l_fields]

    # [DBO.SCHEMA.FIELDNAME, ...]
    uppercase = [f.upper() for f in field_names]


    data_field = [f for f in uppercase if 'DATA' in f.split('.')][0]
    name_field = [f for f in uppercase if 'ATT_NAME' in f.split('.')][0]
    id_field = [f.name for f in l_fields if f.type == 'OID'][0]

    fields = [data_field, name_field, id_field]
    AddMessage(fields)
	
    if group_by_field:
        if group_by_field not in field_names:
            raise Exception('Field {} not found in {}.'.format(group_by_field, field_names))
        fields.append(group_by_field)
        
    # verify path
    verify_path_exists(out_folder)

    with SearchCursor(att_table, fields) as cursor:
        for row in cursor:

            full_out_folder = out_folder
            if group_by_field:

                # get the field name
                group_folder = row[ fields.index(group_by_field) ]
                
                full_out_folder = join(out_folder, group_folder)

                # double check folder path
                verify_path_exists(full_out_folder)

            # get the attachment file and create a filename
            attachment = row[0]
            filename = 'ATT_{2}_{1}'.format(*row)

            # write the attachment blob to disk under the generated file name
            with open(join(full_out_folder, filename), 'wb') as out_file:
                out_file.write(attachment.tobytes())
            
            # cleanup
            del row
            del filename
            del attachment
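
Geodatabase attachment tables are conventionally named <feature class>__ATTACH and carry DATA and ATT_NAME columns, which is what the field sniffing above depends on. A hypothetical call (paths are placeholders for illustration only):

extract_attachments(r'C:\data\example.gdb\inspections__ATTACH',
                    r'C:\exports\attachments')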
Example 25

def las_tiles_to_numpy_pandas(in_lidar_folder, sr, lidar_format, returns,
                              class_codes, format_for_library):
    class LicenseError(Exception):
        pass

    try:
        if CheckExtension("3D") == "Available":
            CheckOutExtension("3D")
        else:
            # raise a custom exception
            raise LicenseError

        if not lidar_format.startswith("."):
            # ensure the format string carries its leading period
            lidar_format = ".{}".format(lidar_format)
        supported_lidar_formats = [".las", ".zlas"]
        assert lidar_format in supported_lidar_formats, \
            "LiDAR format {0} unsupported. Ensure LiDAR format is in {1}".format(lidar_format, supported_lidar_formats)

        lidar_tiles = [
            f for f in listdir(in_lidar_folder)
            if f.endswith("{}".format(lidar_format))
        ]
        if len(lidar_tiles) < 1:
            AddError("No LiDAR tiles detected in input directory")
        count = 0
        for tile in lidar_tiles:
            AddMessage("processing lidar tile {0} of {1} : {2}".format(
                count + 1, len(lidar_tiles), tile))
            lidar_tile = join(in_lidar_folder, tile)
            las_tile_to_numpy_pandas(lidar_tile, sr, returns, class_codes,
                                     format_for_library)
            count += 1
        AddMessage("processing {} lidar tiles complete".format(count))

        # Check back in 3D Analyst license
        CheckInExtension("3D")
    except LicenseError:
        AddError("3D Analyst license is unavailable")
    except ExecuteError:
        AddError("Geoprocessing error:")
        print(GetMessages(2))
Example 26

def process_datasets(from_db,
                     to_db=None,
                     foreach_layer=None,
                     foreach_table=None,
                     foreach_dataset=None):
    """
    creates the projected datasets as necessary, then calls the given
    functions on each layer, table, and dataset
    from_db - the input database to pull from
    to_db - the output database to place the processed data
    foreach_layer - the function to process each layer with
    foreach_table - the function to process each table with
    foreach_dataset - the function to process each dataset with
    """
    #get the datasets in the input workspace
    from arcpy import AddMessage, AddWarning, CreateFeatureDataset_management, ListDatasets, env, ExecuteError
    AddMessage('Workspace: {}'.format(env.workspace))

    #handle feature classes at the top level. these are moved into _top dataset for
    #automatic projection handling
    copy_tables(from_db, to_db, foreach_table)

    process_feature_classes(from_db, to_db, foreach_layer)

    in_datasets = ListDatasets()
    if in_datasets:
        for dataset in in_datasets:
            to_dataset = get_name(dataset)
            from_dataset_path = '{}/{}'.format(from_db, dataset)
            to_dataset_path = '{}/{}'.format(to_db, to_dataset)
            AddMessage('Processing Dataset: {}'.format(from_dataset_path))
            try:
                if foreach_dataset:
                    foreach_dataset(from_db, to_db, dataset)
                else:
                    CreateFeatureDataset_management(to_db, to_dataset,
                                                    env.outputCoordinateSystem)
            except ExecuteError as e:
                AddWarning('Could not create dataset {}, {}'.format(
                    to_dataset, e))

            process_feature_classes(from_dataset_path, to_dataset_path,
                                    foreach_layer)
Example 27
def to_point_feature_class(feature_class, point_feature_class, point_location):
    """
  Converts a feature class to a point feature class
  |point_location|: parameter for conversion, should be "CENTROID" or "INSIDE"
  """
    if Exists(point_feature_class):
        AddMessage(POINT_CONVERSION_DONE)
    else:
        FeatureToPoint_management(in_features=feature_class,
                                  out_feature_class=point_feature_class,
                                  point_location=point_location)
Example 28
    def execute(self, parameters, messages):
        AddMessage('{}; {}; {};'.format(
            parameters[clip_from_db].valueAsText,
            parameters[clip_to_db].valueAsText,
            parameters[clip_projection].valueAsText))
        from_db = parameters[clip_from_db].valueAsText
        to_db = parameters[clip_to_db].valueAsText
        projection = parameters[clip_projection].valueAsText
        clip_layer = parameters[clip_clip_layer].valueAsText

        self.clip(from_db, to_db, projection, clip_layer)
Example 29
def create_database(database_path):
    # create a database connection
    conn = create_connection(database_path)
    
    # create tables
    if conn is not None:        
        # Create list of table files
        tables = [table for table in os.listdir(sql_dir) 
                  if table.startswith('create_table')]
        
        # Create each table from list
        for table in tables:
            AddMessage('Running {}'.format(table))
            with open(os.path.join(sql_dir, table), 'r') as sql_file:
                sql = sql_file.read()
                create_table(conn, sql)
        
        conn.close()
        
    else:
        AddMessage("Error! Failed to create the database connection.")
Example 30
    def log_msg(self, msg, print_msg=True):
        """
        logs a message and prints it to the screen
        """
        time = datetime.datetime.now().strftime("%I:%M %p")
        self.log = f"{self.log}\n{time} | {msg}"
        if print_msg:
            print(msg)

        if self.add_logs_to_arc_messages:
            from arcpy import AddMessage
            AddMessage(msg)