Example #1
def process():
    # TODO: Add user error reporting to alert the user when their bucket cannot be accessed.
    # Begin Script
    class LicenseError(Exception):
        pass

    try:
        if CheckExtension("ImageAnalyst") == "Available":
            CheckOutExtension("ImageAnalyst")
        else:
            # raise a custom exception
            raise LicenseError
        try:
            bucket_url = 'https://s3.{0}.amazonaws.com/{1}/'.format(region, bucket_name)
            f_list = []
            fp_list = []
            '''
            conn = client('s3')  # again assumes boto.cfg setup, assume AWS S3
            
            for key in conn.list_objects(Bucket=bucket_name)['Contents']:
                if not key['Key'].endswith('/') and list_folders is False:
                    f_list.append(key['Key'])
                    fp_list.append(bucket_url + key['Key'])
                if list_folders is True:
                    f_list.append(key['Key'])
                    fp_list.append(bucket_url + key['Key'])
            '''
            s3r = resource('s3')
            bucket_list = [item.key for item in list(s3r.Bucket(bucket_name).objects.all())]
            for key in bucket_list:
                if not key.endswith('/') and list_folders is False:
                    f_list.append(key)
                    fp_list.append(bucket_url + key)
                if list_folders is True:
                    f_list.append(key)
                    fp_list.append(bucket_url + key)


            # Create a Pandas dataframe from the data.
            df = pd.DataFrame({'bucket_url': bucket_url, 'key': f_list, 'full_path': fp_list})

            with pd.ExcelWriter(out_spreadsheet) as writer:
                df.to_excel(writer)
        except NoCredentialsError:
            err_str = 'Boto3 credentials are not set. See the configuration instructions: ' \
                      'https://boto3.amazonaws.com/v1/documentation/api/latest/guide/quickstart.html#configuration'
            AddError(err_str)
            raise ValueError(err_str)
        except s3_client.exceptions.NoSuchBucket:
            AddError('AWS bucket %s does not exist' % bucket_name)
            raise ValueError('AWS bucket %s does not exist' % bucket_name)
        # Check back in Image Analyst license
        CheckInExtension("ImageAnalyst")
    except LicenseError:
        AddError("ImageAnalyst license is unavailable")
        print("ImageAnalyst license is unavailable")
    except ExecuteError:
        AddError(GetMessages(2))
        print(GetMessages(2))
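# The resource-based iteration above pages through the entire bucket, whereas
# a single client.list_objects call returns at most 1,000 keys. If the client
# interface from the commented-out variant is preferred, a paginator is
# needed; a minimal sketch, assuming boto3 credentials are configured:
def list_bucket_keys(bucket_name):
    from boto3 import client
    paginator = client('s3').get_paginator('list_objects_v2')
    # Each page carries up to 1,000 objects under its 'Contents' key
    return [obj['Key']
            for page in paginator.paginate(Bucket=bucket_name)
            for obj in page.get('Contents', [])]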
def main(project_path='traffic-map.aprx',
         service_name="Traveler_Info",
         folder_name="Traveler_Info"):
    """Publishes a project map to a service
    """
    project_path = abspath(project_path)
    if not exists(project_path):
        raise FileNotFoundError("File not found: %s" % project_path)
    # Open the project
    AddMessage("Opening %s" % project_path)
    aprx = ArcGISProject(project_path)
    # Get the first map
    the_map = aprx.listMaps()[0]
    the_layers = the_map.listLayers()

    # Create the output path string by replacing the file extension.
    draft_path = re.sub(r"\.aprx$", ".sddraft", project_path)
    if exists(draft_path):
        AddMessage("Deleting preexisting file: %s" % draft_path)
        os.remove(draft_path)
    AddMessage("Creating %s from %s..." % (project_path, draft_path))
    # Create the web layer SDDraft file.
    try:
        # ArcGIS Pro < 2.0: Fails here with a RuntimeError that has no message
        # if ArcGIS Pro is not open and signed in to ArcGIS Online.
        CreateWebLayerSDDraft(the_layers,
                              draft_path,
                              service_name,
                              "MY_HOSTED_SERVICES",
                              "FEATURE_ACCESS",
                              folder_name=folder_name,
                              copy_data_to_server=True,
                              summary="Test service",
                              tags="test,traffic,traveler",
                              description="Test Service",
                              use_limitations="For testing only")
    except RuntimeError as ex:
        if ex.args:
            AddError("Error creating %s. %s" % (draft_path, ex.args))
        else:
            AddError("Error creating %s. No further info provided." %
                     draft_path)
    else:
        sd_path = re.sub(r"draft$", "", draft_path)
        if exists(sd_path):
            AddMessage("Deleting preexisting file: %s" % sd_path)
            os.remove(sd_path)
        service_definition = arcpy.server.StageService(draft_path)
        arcpy.server.UploadServiceDefinition(service_definition,
                                             "My Hosted Services")
Example #3
def ExportGISProdLyrs(owner_workspace, admin_workspace):
    #similar to CANP, only for layers in another geodatabase, like GISPROD.
    owner = "GIS_CANSYS.SHARED."
    outpre = owner_workspace + "/" + owner

    print "exporting initialized at " + str(datetime.datetime.now())
    destConnection = owner_workspace  #once again, this could be change to the admin workspace
    for lyr in mapping.ListLayers(mxd):
        if lyr.name in gisprodlist:
            try:
                #manipulate the layer name a little bit differently
                lyrname = lyr.name[7:]
                print(lyrname + " exporting...")
                outlyrname = lyrname
                outlyrobj = outpre + outlyrname
                Unlock(admin_workspace)
                FeatureClassToFeatureClass_conversion(lyr, destConnection,
                                                      outlyrname, "#", "#",
                                                      "#")
                ChangePrivileges_management(outlyrobj, "readonly", "GRANT",
                                            "AS_IS")
                print lyrname + " exported to " + outlyrname + " " + str(
                    datetime.datetime.now())
            except ExecuteError:
                msgs = GetMessages(2)
                AddError(msgs)
                print(msgs)
                endingTime = datetime.datetime.now()
                ScriptStatusLogging('CANP_LRS_EXPORT.py',
                                    'ExportGISProdLyrs Function',
                                    scriptFailure, startingTime, endingTime,
                                    GetMessages(2))

                pass
            except (RuntimeError, TypeError, NameError):
                print "TypeError on item" + lyr.name
                endingTime = datetime.datetime.now()
                ScriptStatusLogging('CANP_LRS_EXPORT.py',
                                    'ExportGISProdLyrs Function',
                                    scriptFailure, startingTime, endingTime,
                                    GetMessages(2))

                pass
            except:
                tb = sys.exc_info()[2]
                tbinfo = traceback.format_tb(tb)[0]
                pymsg = "PYTHON ERRORS:\nTraceback info:\n" + tbinfo + "\nError Info:\n" + str(
                    sys.exc_info()[1])
                msgs = "ArcPy ERRORS:\n" + GetMessages(2) + "\n"
                print(pymsg + "\n")
                print(msgs)
                endingTime = datetime.datetime.now()
                ScriptStatusLogging('CANP_LRS_EXPORT.py',
                                    'ExportGISProdLyrs Function',
                                    scriptFailure, startingTime, endingTime,
                                    GetMessages(2))

        else:
            # lyr.name was not in the export list and will be skipped
            pass
def extract_attachments(att_table, out_folder, att_field='file_name'):

    fields = ['DATA', 'ATT_NAME', 'ATTACHMENTID', att_field]

    # check for existence of required fields
    has_fields = [f.name for f in ListFields(att_table)]
    for f in fields:
        if f not in has_fields:
            AddError('Field {} is required in attribute table'.format(f))

    # verify path
    verify_path_exists(out_folder)

    with UpdateCursor(att_table, fields) as cursor:
        for row in cursor:

            # get the attachment file and create a filename
            attachment = row[0]
            filename = 'ATT_{2}_{1}'.format(*row)

            # write the output file and update the row's value to the file name
            with open(join(out_folder, filename), 'wb') as f_out:
                f_out.write(attachment.tobytes())
            row[3] = filename
            cursor.updateRow(row)

            # cleanup
            del row
            del filename
            del attachment
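# Hypothetical usage (paths are placeholders): writes each stored attachment
# to disk and records the generated file name in the 'file_name' field.
#
#     extract_attachments(r'C:\data\parcels.gdb\inspections__ATTACH',
#                         r'C:\exports\attachments')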
def unitsCalc(inFeature):
    SpatialRef = arcpy.Describe(inFeature).spatialReference
    obtainunits = SpatialRef.linearUnitName
    try:
        if obtainunits == "Foot_US":
            units = "Foot"
            return units
        if obtainunits == "Foot":
            units = "Foot"
            return units
        if obtainunits == "Meter":
            units = "Meter"
            return units
        if obtainunits not in ["Foot_US", "Foot", "Meter"]:
            AddError("Units Not Detected on {0} \n Terminating Process".format(inFeature))
            exit()
    except:
        AddError("Units Not Detected on {0} \n Terminating Process".format(inFeature))
        exit()
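# The if-chain above can be condensed with a lookup table; a sketch with the
# same behavior (unrecognized units still report an error and terminate):
def unitsCalcCompact(inFeature):
    unit_map = {"Foot_US": "Foot", "Foot": "Foot", "Meter": "Meter"}
    obtainunits = arcpy.Describe(inFeature).spatialReference.linearUnitName
    if obtainunits in unit_map:
        return unit_map[obtainunits]
    AddError("Units Not Detected on {0} \n Terminating Process".format(inFeature))
    exit()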
def main():
    from arcpy import CheckExtension, CheckOutExtension, CheckInExtension, ExecuteError, GetMessages, AddError

    class LicenseError(Exception):
        pass

    try:
        if CheckExtension("ImageAnalyst") == "Available":
            CheckOutExtension("ImageAnalyst")
        else:
            # raise a custom exception
            raise LicenseError
        try:
            from PIL import Image
        except ModuleNotFoundError:
            from arcpy import AddError
            AddError(
                "PILLOW Library Not Detected. Install using Python Manager in ArcGIS Pro"
            )
            print(
                "PILLOW Library Not Detected. Install using Python Manager in ArcGIS Pro"
            )
            exit()
        i_list, extent = get_images_and_stats(
            in_mosaic
        )  # Obtain image statistics and info from mosaic for processing
        for i in i_list:  # Check that output folder is not the path of i
            if out_folder == path.dirname(i[0]):
                AddError(
                    "outFolder cannot be the same folder/directory as images referenced in the mosaic dataset"
                )
                exit()
        if not path.exists(out_folder):
            makedirs(out_folder)
        texture_images(i_list, extent, in_texture, in_polygon, out_folder,
                       method, blur_distance)  # Generate Texture-Masked tiles

        CheckInExtension("ImageAnalyst")
    except LicenseError:
        AddError("Image Analyst license is unavailable")
        print("Image Analyst license is unavailable")
    except ExecuteError:
        print(GetMessages(2))
def las_tiles_to_numpy_pandas(in_lidar_folder, sr, lidar_format, returns,
                              class_codes, format_for_library):
    class LicenseError(Exception):
        pass

    try:
        if CheckExtension("3D") == "Available":
            CheckOutExtension("3D")
        else:
            # raise a custom exception
            raise LicenseError

        if not lidar_format.startswith(
                "."):  # Ensure the lidar format string starts with a period
            lidar_format = ".{}".format(lidar_format)
        supported_lidar_formats = [".las", ".zlas"]
        assert lidar_format in supported_lidar_formats, \
            "LiDAR format {0} unsupported. Ensure LiDAR format is in {1}".format(lidar_format, supported_lidar_formats)

        lidar_tiles = [
            f for f in listdir(in_lidar_folder)
            if f.endswith("{}".format(lidar_format))
        ]
        if len(lidar_tiles) < 1:
            AddError("No LiDAR tiles detected in input directory")
        count = 0
        for tile in lidar_tiles:
            AddMessage("processing lidar tile {0} of {1} : {2}".format(
                count + 1, len(lidar_tiles), tile))
            lidar_tile = join(in_lidar_folder, tile)
            las_tile_to_numpy_pandas(lidar_tile, sr, returns, class_codes,
                                     format_for_library)
            count += 1
        AddMessage("processing {} lidar tiles complete".format(count))

        # Check back in 3D Analyst license
        CheckInExtension("3D")
    except LicenseError:
        AddError("3D Analyst license is unavailable")
    except ExecuteError:
        AddError(GetMessages(2))
        print(GetMessages(2))
Example #8
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)

            except:
                # Get the traceback object
                tb = sys.exc_info()[2]
                tbinfo = traceback.format_tb(tb)[0]

                # Concatenate information together concerning the error into a message string
                pymsg = "PYTHON ERRORS:\nTraceback info:\n" + tbinfo + "\nError Info:\n" + str(
                    sys.exc_info()[1])
                msgs = "\n************* \n{}\n************* \n".format(
                    GetMessages(2))

                # Return python error messages for use in script tool or Python window
                AddError(pymsg)
                AddError(msgs)

                # Print Python error messages for use in Python / Python window
                log.exception(pymsg)
                log.exception(msgs)
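# The wrapper above is the inner function of an error-handling decorator whose
# enclosing definition was cut off in this example. A minimal sketch of the
# full pattern, assuming the same traceback/log/arcpy names used in the body:
import functools

def report_errors(func):
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception:
            # Surface both the Python traceback and the geoprocessing messages
            AddError(traceback.format_exc())
            AddError(GetMessages(2))
            log.exception("error in %s", func.__name__)
            raise
    return wrapper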
def msg(text, arc_status=None, set_progressor_label=False):
    """
    output messages through Click.echo (cross-platform shell printing) 
    and the ArcPy GP messaging interface and progress bars
    """
    click.echo(text)

    if arc_status == "warning":
        AddWarning(text)
    elif arc_status == "error":
        AddError(text)
    else:
        AddMessage(text)

    if set_progressor_label:
        SetProgressorLabel(text)
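# Usage sketch: a single call fans the text out to the shell, the
# geoprocessing message queue, and (optionally) the progressor label.
#
#     msg("Masking tile 3 of 12 ...", arc_status="warning",
#         set_progressor_label=True)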
Example #10
def ExportCANPLyrs():
    print "exporting initialized at " + str(datetime.datetime.now())
    destConnection = r"D:\SQL61_GIS_CANSYS.sde"
    TableToTable_conversion("MAP_EXTRACT",
                            r"Database Connections/SQL61_GIS_CANSYS.sde",
                            "Map_Extract", "#", "#", "#")
    for lyr in mapping.ListLayers(mxd):
        if lyr.name in CANPlist:
            try:
                lyrname = lyr.name[11:]
                print(lyrname + " exporting...")
                outlyrname = "V_" + lyrname
                outlyrobj = destConnection + "\\GIS_CANSYS.DBO." + outlyrname
                FeatureClassToFeatureClass_conversion(lyr, destConnection,
                                                      outlyrname, "#", "#",
                                                      "#")
                ChangePrivileges_management(outlyrobj, "readonly", "GRANT",
                                            "AS_IS")
                print lyrname + " exported to " + outlyrname + " " + str(
                    datetime.datetime.now())
            except ExecuteError:
                msgs = GetMessages(2)
                AddError(msgs)
                print(msgs)
                pass
            except (RuntimeError, TypeError, NameError):
                print "TypeError on item" + lyr.name
                pass
            except:
                tb = sys.exc_info()[2]
                tbinfo = traceback.format_tb(tb)[0]
                # Concatenate information together concerning the error into a message string
                #
                pymsg = "PYTHON ERRORS:\nTraceback info:\n" + tbinfo + "\nError Info:\n" + str(
                    sys.exc_info()[1])
                msgs = "ArcPy ERRORS:\n" + GetMessages(2) + "\n"
                print(pymsg + "\n")
                print(msgs)

        else:
            #print lyr.name +" was not in the export list and will be skipped"
            pass
def seamless_texture(inImg, outImg):
    from os.path import isfile
    try:
        from PIL import Image
    except ModuleNotFoundError:
        from arcpy import AddError
        AddError(
            "PILLOW Library Not Detected. Install using Python Manager in ArcGIS Pro"
        )
        print(
            "PILLOW Library Not Detected. Install using Python Manager in ArcGIS Pro"
        )
        exit()

    # seamless version already exists, don't regenerate
    if isfile(outImg):
        print("Seamless image already exists, ignoring {0}...".format(inImg))
        return

    img = Image.open(inImg)
    print("Converting {0}...".format(inImg))
    sz = img.size
    region = []
    for i in range(4):
        region += [img.crop((0, 0, sz[0], sz[1]))]
    img = img.resize((sz[0] * 2, sz[1] * 2))

    region[1] = region[1].transpose(Image.FLIP_TOP_BOTTOM)

    region[2] = region[2].transpose(Image.FLIP_LEFT_RIGHT)

    region[3] = region[3].transpose(Image.FLIP_TOP_BOTTOM)
    region[3] = region[3].transpose(Image.FLIP_LEFT_RIGHT)

    img.paste(region[0], (0, 0, sz[0], sz[1]))
    img.paste(region[1], (0, sz[1], sz[0], sz[1] * 2))
    img.paste(region[2], (sz[0], 0, sz[0] * 2, sz[1]))
    img.paste(region[3], (sz[0], sz[1], sz[0] * 2, sz[1] * 2))
    img.save(outImg)
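# Hypothetical usage (paths are placeholders): writes a double-sized,
# mirror-tiled copy of the input that wraps seamlessly in both directions.
#
#     seamless_texture(r'C:\textures\brick.jpg', r'C:\textures\brick_seamless.jpg')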
Example #12
def main():
    from arcpy import CheckExtension, CheckOutExtension, CheckInExtension, ExecuteError, GetMessages, AddMessage
    from arcpy.management import BuildPyramids

    class LicenseError(Exception):
        pass

    try:
        if CheckExtension("ImageAnalyst") == "Available":
            CheckOutExtension("ImageAnalyst")
        else:
            # raise a custom exception
            raise LicenseError
        try:
            from PIL import Image
        except ModuleNotFoundError:
            from arcpy import AddError
            AddError(
                "PILLOW Library Not Detected. Install using Python Manager in ArcGIS Pro"
            )
            print(
                "PILLOW Library Not Detected. Install using Python Manager in ArcGIS Pro"
            )
            exit()

        mask_image(in_image, in_mask, in_texture, out_image, method,
                   blur_distance)
        AddMessage("Building Pyramids")
        BuildPyramids(out_image, -1, "NONE", "NEAREST", "DEFAULT", 75,
                      "OVERWRITE")
        CheckInExtension("ImageAnalyst")

    except LicenseError:
        print("Image Analyst license is unavailable")
    except ExecuteError:
        print(GetMessages(2))
def main():
    from arcpy import CheckExtension, CheckOutExtension, CheckInExtension, ExecuteError, GetMessages, AddError

    class LicenseError(Exception):
        pass

    try:
        if CheckExtension("ImageAnalyst") == "Available":
            CheckOutExtension("ImageAnalyst")
        else:
            # raise a custom exception
            raise LicenseError

        batch_create_tiled_ortho_mosaics(in_folder, image_format, num_bands,
                                         pixel_depth, product_definition,
                                         product_band_definitions, pixel_size,
                                         out_folder)

        CheckInExtension("ImageAnalyst")
    except LicenseError:
        AddError("Image Analyst license is unavailable")
        print("Image Analyst license is unavailable")
    except ExecuteError:
        print(GetMessages(2))
Example #14
def geocodeHardcodedAddresses():
    try:
        setGlobals()
        address = {}

        #single line input example
##        address['SingleLine'] = '2020 SW 32nd Street, Topeka, KS 66611'
##        xy = geoCodeAddress(address)
##        if xy != None:
##            print str(xy[0]) + ' ' + str(xy[1])

        #parsed examples
        address['Street'] = '2020 SW 32nd Street'
        address['City'] = 'Topeka'
        address['State'] = 'KS'
        address['Zip'] = '66611'
        xy = geoCodeAddress(address)
        if xy is not None:
            print(str(xy[0]) + ' ' + str(xy[1]) + ' ' + address['Street'] + ' ' + address['City'])

        address['Street'] = '2010 SW 32nd Street'
        address['City'] = 'Topeka'
        address['State'] = 'KS'
        address['Zip'] = '66611'
        xy = geoCodeAddress(address)
        if xy is not None:
            print(str(xy[0]) + ' ' + str(xy[1]) + ' ' + address['Street'] + ' ' + address['City'])

        address['Street'] = '2020 SW 32nd terrace'
        address['City'] = 'Topeka'
        address['State'] = 'KS'
        address['Zip'] = '66611'
        xy = geoCodeAddress(address)
        if xy is not None:
            print(str(xy[0]) + ' ' + str(xy[1]) + ' ' + address['Street'] + ' ' + address['City'])

        address['Street'] = '5175 Tuttle Cove Rd'
        address['City'] = 'Manhattan'
        address['State'] = 'KS'
        address['Zip'] = '66502'
        xy = geoCodeAddress(address)
        if xy is not None:
            print(str(xy[0]) + ' ' + str(xy[1]) + ' ' + address['Street'] + ' ' + address['City'])

        address['Street'] = '2800 SW TOPEKA BLVD'
        address['City'] = 'TOPEKA'
        address['State'] = 'KS'
        address['Zip'] = '66611'
        xy = geoCodeAddress(address)
        if xy is not None:
            print(str(xy[0]) + ' ' + str(xy[1]) + ' ' + address['Street'] + ' ' + address['City'])

        address['Street'] = ''
        address['City'] = 'Topeka'
        address['State'] = 'KS'
        address['Zip'] = ''
        xy = geoCodeAddress(address)
        if xy is not None:
            print(str(xy[0]) + ' ' + str(xy[1]) + ' ' + address['Street'] + ' ' + address['City'])

    except:

        # Return any Python-specific errors and any error returned by the geoprocessor
        tb = sys.exc_info()[2]
        tbinfo = traceback.format_tb(tb)[0]
        pymsg = "Python Errors:\nTraceback Info:\n" + tbinfo + "\nError Info:\n" + \
                str(sys.exc_info()[0]) + ": " + str(sys.exc_info()[1]) + "\n"
        gpmsg = "GP ERRORS:\n" + GetMessages(2) + "\n"

        print(pymsg)
        print(gpmsg)
        AddError(pymsg)
        AddError(gpmsg)
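# The repeated blocks above differ only in the address values; the same checks
# can be driven from a list of tuples. A sketch, assuming the same
# setGlobals()/geoCodeAddress() helpers:
def geocodeAddressList():
    setGlobals()
    test_addresses = [
        ('2020 SW 32nd Street', 'Topeka', 'KS', '66611'),
        ('2010 SW 32nd Street', 'Topeka', 'KS', '66611'),
        ('2020 SW 32nd terrace', 'Topeka', 'KS', '66611'),
        ('5175 Tuttle Cove Rd', 'Manhattan', 'KS', '66502'),
        ('2800 SW TOPEKA BLVD', 'TOPEKA', 'KS', '66611'),
        ('', 'Topeka', 'KS', ''),
    ]
    for street, city, state, zip_code in test_addresses:
        address = {'Street': street, 'City': city, 'State': state, 'Zip': zip_code}
        xy = geoCodeAddress(address)
        if xy is not None:
            print(str(xy[0]) + ' ' + str(xy[1]) + ' ' + street + ' ' + city)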
Example #15
def publish2ago(folder, agoitem, gis, metadataurl, prjPath, prj, user, guid):

    # Set sharing options
    shrOrg = False
    shrEveryone = False
    shrGroups = ''
    tags = "BC; British Columbia; Canada; "
    credits = "DataBC, GeoBC"
    use_limits = r'<p>This web service is <a href="http://www2.gov.bc.ca/gov/content/home/copyright">Copyright Province of British Columbia</a>.  All rights reserved.</p>  <p>The B.C. Map Hub and associated materials, including map applications (&quot;Maps&quot;), trade-marks and official marks (collectively, &quot;Materials&quot;), are owned or used under license by the Province of British Columbia (&quot;Province&quot;) and are protected by copyright and trade-mark laws.  Please see the <a href="https://www2.gov.bc.ca/gov/content?id=14CE6DD7756F402287618963D936BE44">Disclaimer</a> for further details.</p>  <p>The Province does not collect, use or disclose personal information through the ArcGIS Online website.  Please be aware, however, that IP addresses are collected by Esri and are stored on Esri\'s servers located outside of Canada.  For further information, including the purposes for which your IP address is collected,  please see Esri\'s Privacy Policy at:  <a>http://www.esri.com/legal/privacy</a>.   By accessing or using the B.C. Map Hub, you consent, effective as of the date of such access or use, to Esri storing and accessing your IP address outside of Canada for the purposes described in Esri\'s Privacy Policy.  Should you have any questions about the collection of your IP address, please contact BCGov AGOL CONTACT at [email protected], PO BOX 9864 STN PROV GOVT, Victoria BC  V8W 9T5</p>'

    # Local paths to create temporary content
    relPath = os.path.dirname(prjPath)
    print("Path to store temporary service definitions: " + relPath)
    #print('Creating SD file')
    arcpy.env.overwriteOutput = True
    prj = arcpy.mp.ArcGISProject(prjPath)

    arcpy.env.workspace = relPath
    print(prj.filePath)

    try:
        for m in prj.listMaps():
            #print("Map: " + m.name)

            lyrs = m.listLayers()
            start_time = time.time()
            for lyr in lyrs:
                cnt = 0
                if lyr.name.strip() == agoitem.title.strip():
                    cnt = 1
                    print('APRX Layer name: ' + lyr.name)
                    print('AGO item title: ' + agoitem.title)

                    print('Search for original SD on portal')
                    sd_results = gis.content.search(
                        query="title:" + agoitem.title + " AND owner:" + user,
                        item_type='Service Definition')
                    sdItem = sd_results[0] if sd_results else None

                    if sdItem:
                        print("found existing AGO service definiton for:  " +
                              sdItem.title)

                        cnt = cnt + 1
                    else:
                        print('did not find service definition for: ' +
                              lyr.name)

                    sddraft = os.path.join(relPath,
                                           "temp.sddraft").replace("/", "\\")
                    #sddraft="temp.sddraft"
                    sd = os.path.join(relPath, "temp.sd").replace("/", "\\")
                    print("sddraft: " + sddraft)
                    if os.path.exists(sddraft):
                        os.remove(sddraft)
                        print("removing " + sddraft)
                    if os.path.exists(sd):
                        os.remove(sd)
                        print("removing " + sd)
                    now = time.ctime(int(time.time()))
                    #bcdc_package = bcdc.find_package_for_feature_class(feat_class)

                    summary = "Summary from BCDC Last Updated - " + now
                    print(summary)
                    item_tags = tags + lyr.name
                    description = "Description from BCDC"

                    #print(sddraft, lyr.name, Folder,summary,tags,description,credits,use_limits)

                    print('Create SDDRAFT')
                    #CreateWebLayerSDDraft (map_or_layers, out_sddraft, service_name, {server_type}, {service_type}, {folder_name}, {overwrite_existing_service}, {copy_data_to_server}, {enable_editing}, {allow_exporting}, {enable_sync}, {summary}, {tags}, {description}, {credits}, {use_limitations})
                    print(
                        arcpy.mp.CreateWebLayerSDDraft(
                            lyr,
                            sddraft,
                            lyr.name,
                            'MY_HOSTED_SERVICES',
                            'FEATURE_ACCESS',
                            folder_name=folder,
                            overwrite_existing_service=True,
                            copy_data_to_server=True,
                            enable_editing=False,
                            allow_exporting=False,
                            enable_sync=False,
                            summary=summary,
                            tags=item_tags,
                            description=description,
                            credits=credits,
                            use_limitations=use_limits))
                    print('Stage SDDRAFT')
                    print(sddraft, sd)
                    print(arcpy.StageService_server(sddraft, sd))

                    if not sdItem:
                        sdItem = arcpy.UploadServiceDefinition_server(
                            sd,
                            in_server='My Hosted Services',
                            in_folder_type='FROM_SERVICE_DEFINITION',
                            in_startupType='STARTED',
                            in_override='USE_DEFINITION')
                        #print("Uploading SD: {}, ID: {} ....".format(sdItem.title, sdItem.id))
                        print(sdItem.update(data=sd))
                        fs = sdItem.publish(overwrite=True)
                        print(
                            'No existing SD found for {}; uploaded and published as user {}'
                            .format(lyr.name, user))
                    else:
                        #print("Found SD: {}, ID: {} n Uploading and overwriting..".format(sdItem.title, sdItem.id))
                        print(sdItem.update(data=sd))
                        print("Overwriting existing feature service..")
                        fs = sdItem.publish(overwrite=True)

                    if shrOrg or shrEveryone or shrGroups:
                        print("Setting sharing options")
                        fs.share(org=shrOrg,
                                 everyone=shrEveryone,
                                 groups=shrGroups)

                    print("Finished updating: {}  ID: {}".format(
                        fs.title, fs.id))
                    elapsed_time = time.time() - start_time
                    print('Elapsed Time: ' + str(elapsed_time))
                    now = time.ctime(int(time.time()))
                    print('Time: ' + now)
                    print('')

    except RuntimeError as ex:
        if len(ex.args) > 0:
            AddError("Error creating %s. %s" % (relPath, ex.args))
        else:
            AddError("Error creating %s. No further info provided." % relPath)
Example #16
def process():
    class LicenseError(Exception):
        pass

    try:
        if CheckExtension("3D") == "Available":
            CheckOutExtension("3D")
        else:
            # raise a custom exception
            raise LicenseError

        # Constants - DO NOT MODIFY
        split_area = "split_area"
        orig_area = "orig_area"

        def calc_area(in_fc, field_name):
            AddField(in_fc, field_name, "DOUBLE")
            with da.UpdateCursor(in_fc, [field_name, "SHAPE@AREA"]) as cursor1:
                for r1 in cursor1:
                    r1[0] = r1[1]
                    cursor1.updateRow(r1)

        def field_exists(in_fc, in_field):
            from arcpy import ListFields
            if in_field in [f.name for f in ListFields(in_fc)]:
                return True
            else:
                return False

        def delete_field_if_exists(in_fc, in_field):
            if field_exists(in_fc, in_field):
                DeleteField(in_fc, in_field)

        assert field_exists(in_buildings, building_fid), \
            "no attribute named {} in feature class".format(building_fid)

        for field in [tile_fid, file_name]:
            delete_field_if_exists(in_buildings, field)

        temp_fp = join("in_memory", "mp_fp")
        ddd.MultiPatchFootprint(in_buildings, temp_fp, "bldg_fid")

        calc_area(in_fc=temp_fp, field_name=orig_area)

        temp_isect = join("in_memory", "temp_isect")
        Intersect(r"{0} #;{1} #".format(temp_fp, in_tiles), temp_isect, "ALL",
                  None, "INPUT")

        # Delete Temporary Multipatch Footprint
        Delete(temp_fp)

        calc_area(in_fc=temp_isect, field_name=split_area)

        temp_isect_asc = join("in_memory", "temp_isect_asc")
        Sort(temp_isect, temp_isect_asc, [[building_fid, "ASCENDING"]])
        # Delete Temporary Intersect Feature Class
        Delete(temp_isect)

        fields = [building_fid, tile_fid, file_name, orig_area, split_area]

        # Generate a list of duplicates
        bldg_list = []
        with da.SearchCursor(temp_isect_asc, building_fid) as cursor2:
            for row in cursor2:
                bldg_list.append(row[0])

        duplicates = [
            item for item, count in Counter(bldg_list).items() if count > 1
        ]

        duplicates_list = []
        for i in duplicates:
            duplicates_list.append([i, bldg_list.count(i)])

        # TODO: Resolve why tile_fid is not showing up below when BuildingFID and TileFID are OID fields. "In_memory" issue
        '''
        # \\ Begin Debug print code
        from arcpy import AddMessage
        fds = [f.name for f in arcpy.ListFields(temp_isect_asc) if f.name in fields]
        AddMessage(fds)
        nfds = [f.name for f in arcpy.ListFields(temp_isect_asc) if f.name not in fields]
        AddMessage(nfds)
        # End Debug print code //
        '''
        final_list = []
        with da.SearchCursor(temp_isect_asc, fields) as cursor3:
            prev_area = -1
            prev_item_list = []
            item_count = 0
            for row in cursor3:
                if row[0] not in duplicates:
                    final_list.append([row[0], row[1], row[2]])
                else:
                    area = row[3] - row[4]
                    index = duplicates.index(row[0])
                    total_items = duplicates_list[index][1]
                    if row[0] == duplicates[
                            0] and item_count == 0:  # Deal with first item differently
                        item_count += 1
                        prev_area = area
                        prev_item_list = [row[0], row[1], row[2]]
                    elif item_count + 1 == total_items:  # Deal with last item in list
                        if prev_area <= area:
                            prev_area = area
                            prev_item_list = [row[0], row[1], row[2]]
                        final_list.append(prev_item_list)
                        item_count = 0
                        prev_area = -1
                        prev_item_list = []
                    elif item_count + 1 != total_items:
                        if prev_area <= area:
                            prev_area = area
                            prev_item_list = [row[0], row[1], row[2]]
                        item_count += 1
        # Append results back to Input Feature Class
        AddField(in_buildings, tile_fid, "LONG")
        AddField(in_buildings, file_name, "TEXT")
        with da.UpdateCursor(in_buildings,
                             [building_fid, tile_fid, file_name]) as cursor:
            for r in cursor:
                for i in final_list:
                    if r[0] == i[0]:
                        r[1] = int(i[1])
                        r[2] = str(i[2])
                cursor.updateRow(r)

        Delete(temp_isect_asc)
        del bldg_list
        del duplicates_list
        del duplicates

        # Check back in 3D Analyst license
        CheckInExtension("3D")
    except LicenseError:
        AddError("3D Analyst license is unavailable")
        print("3D Analyst license is unavailable")
    except ExecuteError:
        AddError(GetMessages(2))
        print(GetMessages(2))
Example #17
def process():
    def ensure_dir(file_path):
        directory = path.dirname(file_path)
        if not path.exists(directory):
            makedirs(directory)

    def zipper(in_list, out_file_path):
        out_file = '{0}.zip'.format(out_file_path)
        ensure_dir(out_file)
        with zipfile.ZipFile(out_file, 'w') as zipMe:
            for f in in_list:
                arcname = f.replace(path.dirname(out_file_path), "")
                zipMe.write(f, arcname=arcname, compress_type=zipfile.ZIP_DEFLATED)

    def zipper_gdb(in_gdb, out_file_name):
        assert in_gdb.endswith('.gdb'), "Error: file extension {0} not detected in in_folder".format(".gdb")
        root_dir = path.dirname(in_gdb)
        gdb_name = path.basename(in_gdb)
        myzip = zipfile.ZipFile(path.join(root_dir, out_file_name), 'w', zipfile.ZIP_DEFLATED)
        for folder, subfolder, file in walk(path.join(root_dir, gdb_name)):
            for each in subfolder + file:
                source = path.join(folder, each)
                if not source.endswith(".lock"):
                    # remove the absolute path to compose arcname
                    # also handles the remaining leading path separator with lstrip
                    arcname = source[len(root_dir):].lstrip(sep)
                    # write the file under a different name in the archive
                    myzip.write(source, arcname=arcname)
        myzip.close()

    def zip_folder(in_folder, out_file_name):
        myzip = zipfile.ZipFile(path.join(in_folder, out_file_name), 'w', zipfile.ZIP_DEFLATED)
        for folder, subfolder, file in walk(in_folder):
            for each in subfolder + file:
                source = path.join(folder, each)
                # remove the absolute path to compose arcname
                # also handles the remaining leading path separator with lstrip
                arcname = source[len(in_folder):].lstrip(sep)
                # write the file under a different name in the archive
                myzip.write(source, arcname=arcname)
        myzip.close()


    class LicenseError(Exception):
        pass

    try:
        if CheckExtension("3D") == "Available":
            CheckOutExtension("3D")
        else:
            # raise a custom exception
            raise LicenseError

        # Ensure output folder exists
        if not path.exists(out_folder):
            makedirs(out_folder)

        unique_values = set(row[0] for row in da.SearchCursor(in_buildings, tile_fid))
        for v in unique_values:
            print(v)

        for val in unique_values:
            out_name = out_file_basename + "_{0}".format(val)
            if out_format == "Multipatch SHP":
                out_file = path.join(out_folder, out_name+'.shp')
                Select(in_buildings, out_file, "{0} = {1}".format(tile_fid, val))
                if zip_files:
                    stem = path.join(out_folder, out_name)
                    in_list = [out_file,
                               '{}.shp.xml'.format(stem),
                               '{}.shx'.format(stem),
                               '{}.sbx'.format(stem),
                               '{}.sbn'.format(stem),
                               '{}.prj'.format(stem),
                               '{}.dbf'.format(stem),
                               '{}.cpg'.format(stem)]
                    zipper(in_list, stem)
                    Delete(out_file)
            if out_format == "Multipatch GDB":
                gdb = path.join(out_folder, out_name + '.gdb')
                CreateFileGDB(out_folder, out_name + '.gdb')
                out_file = path.join(gdb, out_name)
                Select(in_buildings, out_file, "{0} = {1}".format(tile_fid, val))
                if zip_files:
                    out_zip = out_name + '.zip'
                    zipper_gdb(gdb, out_zip)
                    Delete(gdb)
            if out_format == "DAE":
                folder = path.join(out_folder, out_name)
                # Ensure output folder exists
                if not path.exists(folder):
                    makedirs(folder)
                MakeFeatureLayer(in_buildings, "bldg_layer", "{0} = {1}".format(tile_fid, val), None)
                MultipatchToCollada("bldg_layer", folder, "PREPEND_NONE", "OBJECTID")
                Delete("bldg_layer")
                if zip_files:
                    zip_folder(folder, folder + ".zip")
                    Delete(folder)

        # Check back in 3D Analyst license
        CheckInExtension("3D")
    except LicenseError:
        AddError("3D Analyst license is unavailable")
        print("3D Analyst license is unavailable")
    except ExecuteError:
        print(GetMessages(2))
def process():
    class LicenseError(Exception):
        pass

    try:
        if CheckExtension("ImageAnalyst") == "Available":
            CheckOutExtension("ImageAnalyst")
        else:
            # raise a custom exception
            raise LicenseError

        # Constants - DO NOT MODIFY
        supported_folder_extensions = [
            'gdb'
        ]  # Currently not using this.... for future needs...

        def ensure_dir(file_path):
            directory = os.path.dirname(file_path)
            if not os.path.exists(directory):
                os.makedirs(directory)

        def zipper(in_list, out_file_path):
            out_file = '{0}.zip'.format(out_file_path)
            ensure_dir(out_file)
            with zipfile.ZipFile(out_file, 'w') as zipMe:
                for f in in_list:
                    zipMe.write(f, compress_type=zipfile.ZIP_DEFLATED)

        def zipper_gdb(in_gdb, out_file_name):
            assert in_gdb.endswith(
                '.gdb'
            ), "Error: file extension {0} not detected in in_folder".format(
                ".gdb")
            root_dir = os.path.dirname(in_gdb)
            gdb_name = os.path.basename(in_gdb)
            myzip = zipfile.ZipFile(os.path.join(root_dir, out_file_name), 'w',
                                    zipfile.ZIP_DEFLATED)
            for folder, subfolder, file in os.walk(
                    os.path.join(root_dir, gdb_name)):
                for each in subfolder + file:
                    source = os.path.join(folder, each)
                    if not source.endswith(".lock"):
                        # remove the absolute path to compose arcname
                        # also handles the remaining leading path separator with lstrip
                        arcname = source[len(root_dir):].lstrip(os.sep)
                        # write the file under a different name in the archive
                        myzip.write(source, arcname=arcname)
            myzip.close()

        def zip_folder(in_folder, out_file_name):
            myzip = zipfile.ZipFile(os.path.join(in_folder, out_file_name),
                                    'w', zipfile.ZIP_DEFLATED)
            for folder, subfolder, file in os.walk(in_folder):
                for each in subfolder + file:
                    source = os.path.join(folder, each)
                    # remove the absolute path to compose arcname
                    # also handles the remaining leading path separator with lstrip
                    arcname = source[len(in_folder):].lstrip(os.sep)
                    # write the file under a different name in the archive
                    myzip.write(source, arcname=arcname)
            myzip.close()

        # TODO: do something with folder-based file structures, e.g. GDBs, using the zipper_gdb()/zip_folder() helpers above.
        from arcpy import AddMessage

        files_in_dir = []
        for root, dirs, files in os.walk(in_directory):
            for filename in files:
                files_in_dir.append([root, filename])

        file_name_list = []
        files_to_zip = []
        for f in files_in_dir:
            root = f[0]
            filename = f[1]
            file = os.path.join(root, filename)
            file_partitioned = filename.partition('.')[0]
            if file_partitioned not in file_name_list:
                if len(files_to_zip) > 1:
                    out_file_path = files_to_zip[0].replace(
                        in_directory, out_directory).partition('.')[0]
                    zipper(files_to_zip, out_file_path)
                    AddMessage(files_to_zip)
                    files_to_zip = []
                file_name_list.append(file_partitioned)
            else:
                files_to_zip.append(file)
                # If last file in directory for processing
                if root == files_in_dir[-1][0] and filename == files_in_dir[
                        -1][1]:
                    out_file_path = files_to_zip[0].replace(
                        in_directory, out_directory).partition('.')[0]
                    zipper(files_to_zip, out_file_path)

        # Check back in Image Analyst license
        CheckInExtension("ImageAnalyst")
    except LicenseError:
        AddError("ImageAnalyst license is unavailable")
        print("ImageAnalyst license is unavailable")
    except ExecuteError:
        AddError(GetMessages(2))
        print(GetMessages(2))
Example #19
def main():
    # tool inputs
    INPUT_NETWORK = argv[1]
    INPUT_POINTS = argv[2]
    INPUT_ORIGINS_FIELD = argv[3]
    INPUT_DESTINATIONS_FIELD = argv[4]
    INPUT_COEFF = float(argv[5])
    INPUT_SEARCH_RADIUS = float(argv[6]) if is_number(
        argv[6]) else float('inf')
    INPUT_OUTPUT_DIRECTORY = argv[7]
    INPUT_OUTPUT_FEATURE_CLASS_NAME = argv[8]
    INPUT_COMPUTE_WAYFINDING = argv[9] == "true"
    INPUT_VISUALIZATION = argv[10]

    # check that network has "Length" attribute
    if "Length" not in network_cost_attributes(INPUT_NETWORK):
        AddError("Network <%s> does not have Length attribute" % INPUT_NETWORK)
        return

    # check that coeff is at least 1
    if INPUT_COEFF < 1:
        AddError("Redundancy coefficient <%s> must be at least 1" %
                 INPUT_COEFF)
        return

    # extract origin and destination ids
    origin_ids = flagged_points(INPUT_POINTS, INPUT_ORIGINS_FIELD)
    if len(origin_ids) != 1:
        AddError("Number of origins <%s> must be 1" % len(origin_ids))
        return
    origin_id = origin_ids[0]
    destination_ids = flagged_points(INPUT_POINTS, INPUT_DESTINATIONS_FIELD)
    if len(destination_ids) == 0 or origin_ids == destination_ids:
        AddWarning("No OD pair found, no computation will be done")
        return

    # check that the output file does not already exist
    output_feature_class = "%s.shp" % join(INPUT_OUTPUT_DIRECTORY,
                                           INPUT_OUTPUT_FEATURE_CLASS_NAME)
    if Exists(output_feature_class):
        AddError("Output feature class <%s> already exists" %
                 output_feature_class)
        return

    # obtain visualization method
    visualize_segments = visualize_polylines = False
    if INPUT_VISUALIZATION == "Unique Segments":
        visualize_segments = True
    elif INPUT_VISUALIZATION == "Path Polylines":
        visualize_polylines = True
    elif INPUT_VISUALIZATION != "None":
        AddError("Visualization method <%s> must be one of 'Unique Segments', "
                 "'Path Polylines', or 'None'" % INPUT_VISUALIZATION)
        return

    # setup
    env.overwriteOutput = True

    # construct network and points
    network, points, edge_to_points = construct_network_and_load_buildings(
        INPUT_POINTS, INPUT_NETWORK)

    # find redundant paths for each origin-destination
    AddMessage("Computing redundant paths ...")
    progress_bar = Progress_Bar(len(destination_ids), 1, "Finding paths ...")
    # build output table one row at a time, starting from header row
    answers = [["OrigID", "DestID", "NumPaths", "Redundancy"]]
    if INPUT_COMPUTE_WAYFINDING:
        answers[0].append("Wayfinding")
    # visualization state
    if visualize_polylines:
        polylines = []
        polyline_data = []
    elif visualize_segments:
        all_unique_segment_counts = defaultdict(int)
    for destination_id in destination_ids:
        if origin_id != destination_id:
            all_paths = find_all_paths(network, points, INPUT_COEFF, origin_id,
                                       destination_id, INPUT_SEARCH_RADIUS,
                                       INPUT_COMPUTE_WAYFINDING)
            if all_paths is not None:
                if INPUT_COMPUTE_WAYFINDING:
                    (all_path_points, unique_segment_counts, num_paths,
                     redundancy, waypoint) = all_paths
                    answers.append([
                        origin_id, destination_id, num_paths, redundancy,
                        waypoint
                    ])
                else:
                    (all_path_points, unique_segment_counts, num_paths,
                     redundancy) = all_paths
                    answers.append(
                        [origin_id, destination_id, num_paths, redundancy])
                if visualize_polylines:
                    for i, path_points in enumerate(all_path_points):
                        polylines.append(
                            Polyline(
                                Array([
                                    Point(*coords) for coords in path_points
                                ])))
                        polyline_data.append((origin_id, destination_id, i))
                elif visualize_segments:
                    for edge_id in unique_segment_counts:
                        all_unique_segment_counts[
                            edge_id] += unique_segment_counts[edge_id]
        progress_bar.step()
    AddMessage("\tDone.")

    # write out results
    if len(answers) > 1:
        AddMessage("Writing out results ...")
        # write out to a table
        write_rows_to_csv(answers, INPUT_OUTPUT_DIRECTORY,
                          INPUT_OUTPUT_FEATURE_CLASS_NAME)
        # visualize
        if visualize_polylines:
            CopyFeatures_management(polylines, output_feature_class)
            data_fields = ["OrigID", "DestID", "PathID"]
            for field in data_fields:
                AddField_management(in_table=output_feature_class,
                                    field_name=field,
                                    field_type="INTEGER")
            rows = UpdateCursor(output_feature_class, data_fields)
            for j, row in enumerate(rows):
                row[0], row[1], row[2] = polyline_data[j]
                rows.updateRow(row)
            # create a layer of the polylines shapefile and symbolize
            polylines_layer_name = "%s_layer" % INPUT_OUTPUT_FEATURE_CLASS_NAME
            polylines_layer = "%s.lyr" % join(INPUT_OUTPUT_DIRECTORY,
                                              INPUT_OUTPUT_FEATURE_CLASS_NAME)
            MakeFeatureLayer_management(output_feature_class,
                                        polylines_layer_name)
            SaveToLayerFile_management(polylines_layer_name, polylines_layer,
                                       "ABSOLUTE")
            ApplySymbologyFromLayer_management(
                polylines_layer,
                join(path[0],
                     r"Symbology_Layers\sample_polylines_symbology.lyr"))
            add_layer_to_display(polylines_layer)
        elif visualize_segments:
            id_mapping, edges_file = select_edges_from_network(
                INPUT_NETWORK, all_unique_segment_counts.keys(),
                INPUT_OUTPUT_DIRECTORY,
                "%s_edges" % INPUT_OUTPUT_FEATURE_CLASS_NAME)
            AddField_management(in_table=edges_file,
                                field_name="PathCount",
                                field_type="INTEGER")
            rows = UpdateCursor(edges_file, ["OID@", "PathCount"])
            for row in rows:
                row[1] = all_unique_segment_counts[id_mapping[row[0]]]
                rows.updateRow(row)
        AddMessage("\tDone.")
    else:
        AddMessage("No results to write out.")
Example #20
####################################
#   File name: get_file_urls_in_s3_bucket.py
#   About: Process for obtaining the URLs for all files in an Amazon S3 bucket
#   Author: Geoff Taylor | Imagery & Remote Sensing Team | Esri
#   Date created: 01/21/2021
#   Date last modified: 01/26/2021
#   Python Version: 3.7
####################################

import pandas as pd
from arcpy import AddError, CheckExtension, CheckOutExtension, CheckInExtension
# Ensure Amazon AWS boto3 is installed
try:
    from boto3 import client, resource
except ModuleNotFoundError:
    AddError("boto3 required to run process. Detected Boto3 missing.")
    AddError("install boto3 using conda")
    AddError("conda install -c anaconda boto3")
    AddError("learn more on Boto3 https://boto3.amazonaws.com/v1/documentation/api/latest/guide/quickstart.html#configuration")
    print("boto3 required to run process. Detected Boto3 missing.")
    print("install boto3 using conda")
    print("conda install -c anaconda boto3")
    print("learn more on Boto3 https://boto3.amazonaws.com/v1/documentation/api/latest/guide/quickstart.html#configuration")



def make_boolean(in_value):
    # Convert the text-based list_folders tool input to a real boolean
    if in_value.lower() in ['true', '1', 't', 'y', 'yes', 'yeah', 'yup', 'certainly', 'uh-huh']:
        return True
    else:
        return False
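# Usage sketch: normalizes free-text tool input into a real boolean.
#
#     make_boolean('Yes')    # True
#     make_boolean('FALSE')  # False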
Example #21
    def execute(self, parameters, messages):
        """Runs the script"""

        # Get the user's input
        fc = parameters[0].valueAsText
        field_mappings = parameters[1].valueAsText
        fields = parameters[1].valueAsText.split(';')
        fields.append('SHAPE@XY')
        output_dir = parameters[2].valueAsText
        output_name = parameters[3].valueAsText
        convert_to_wgs84 = self.toBool(parameters[4].valueAsText)
        convert_to_geojson = self.toBool(parameters[5].valueAsText)
        convert_to_kmz = self.toBool(parameters[6].valueAsText)
        convert_to_csv = self.toBool(parameters[7].valueAsText)
        convert_metadata = self.toBool(parameters[8].valueAsText)
        debug = self.toBool(parameters[9].valueAsText)

        # Setup vars
        output_path = output_dir + '\\' + output_name
        shp_output_path = output_dir + '\\shapefile'
        shp_temp_output_path = output_dir + '\\shapefile\\temp\\'
        shapefile = shp_output_path + '\\' + output_name + '.shp'
        temp_shapefile = shp_output_path + '\\temp\\' + output_name + '.shp'

        if debug:
            AddMessage('Field infos:')
            AddMessage(field_mappings)

        try:
            arcpy.Delete_management('temp_layer')
        except:
            if debug:
                AddMessage('Did not have a temp_layer feature ' +
                           'class to delete')

        if not os.path.exists(shp_output_path):
            os.makedirs(shp_output_path)
            if debug:
                AddMessage('Created directory ' + shp_output_path)

        if not os.path.exists(shp_temp_output_path):
            os.makedirs(shp_temp_output_path)
        else:
            for file in os.listdir(shp_temp_output_path):
                file_path = os.path.join(shp_temp_output_path, file)
                try:
                    if os.path.isfile(file_path):
                        os.unlink(file_path)
                except:
                    AddWarning('Unable to delete ' + file +
                               ' from the temp folder. This ' +
                               'may become a problem later')
                    pass

        arcpy.MakeFeatureLayer_management(fc, 'temp_layer', '', '',
                                          field_mappings)
        arcpy.CopyFeatures_management('temp_layer', temp_shapefile)

        if convert_to_wgs84:
            AddMessage('Converting spatial reference to WGS84...')
            arcpy.Project_management(
                temp_shapefile, shapefile,
                "GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',SPHEROID['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433],METADATA['World',-180.0,-90.0,180.0,90.0,0.0,0.0174532925199433,0.0,1262]]",
                "WGS_1984_(ITRF00)_To_NAD_1983",
                "PROJCS['NAD_1983_StatePlane_Pennsylvania_South_FIPS_3702_Feet',GEOGCS['GCS_North_American_1983',DATUM['D_North_American_1983',SPHEROID['GRS_1980',6378137.0,298.257222101]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Lambert_Conformal_Conic'],PARAMETER['False_Easting',1968500.0],PARAMETER['False_Northing',0.0],PARAMETER['Central_Meridian',-77.75],PARAMETER['Standard_Parallel_1',39.93333333333333],PARAMETER['Standard_Parallel_2',40.96666666666667],PARAMETER['Latitude_Of_Origin',39.33333333333334],UNIT['Foot_US',0.3048006096012192]]"
            )
            AddMessage('Projection conversion completed.')
        else:
            AddMessage('Exporting shapefile already in WGS84...')
            arcpy.FeatureClassToShapefile_conversion(temp_shapefile,
                                                     shp_output_path)

        try:
            arcpy.Delete_management('temp_layer')
        except:
            AddError('Unable to delete in_memory feature class')

        AddMessage('Compressing the shapefile to a .zip file...')

        export = Export(output_dir, output_name, debug)

        zip = export.zip()
        if zip:
            AddMessage('Finished creating ZIP archive')

        if convert_to_geojson:
            AddMessage('Converting to GeoJSON...')
            output = output_path + '.geojson'
            geojson = esri2open.toOpen(shapefile,
                                       output,
                                       includeGeometry='geojson')
            if geojson:
                AddMessage('Finished converting to GeoJSON')

        if convert_to_kmz:
            AddMessage('Converting to KML...')
            kmz = export.kmz()
            if kmz:
                AddMessage('Finished converting to KMZ')

        if convert_to_csv:
            AddMessage('Converting to CSV...')
            csv = export.csv()
            if csv:
                AddMessage('Finished converting to CSV')

        if convert_metadata:
            AddMessage('Converting metadata to Markdown ' +
                       'README.md file...')
            md = export.md()
            if md:
                AddMessage('Finished converting metadata to ' +
                           'Markdown README.md file')

        # Delete the /temp directory because we're done with it
        shutil.rmtree(os.path.join(shp_output_path, 'temp'))
        if debug:
            AddMessage("Deleted the /temp folder because we don't" +
                       " need it anymore")

        return
Example #22
def main():
    # tool inputs
    INPUT_NETWORK = argv[1]
    INPUT_POINTS = argv[2]
    INPUT_ORIGINS_FIELD = argv[3]
    INPUT_DESTINATIONS_FIELD = argv[4]
    INPUT_BUILDING_WEIGHTS_FIELD = argv[5]
    INPUT_COEFF = float(argv[6])
    INPUT_SEARCH_RADIUS = float(argv[7]) if is_number(argv[7]) else float('inf')
    INPUT_OUTPUT_DIRECTORY = argv[8]
    INPUT_OUTPUT_FEATURE_CLASS_NAME = argv[9]

    # check that network has "Length" attribute
    if "Length" not in network_cost_attributes(INPUT_NETWORK):
      AddError("Network <%s> does not have Length attribute" % INPUT_NETWORK)
      return

    # check that coeff is at least 1
    if INPUT_COEFF < 1:
      AddError("Redundancy coefficient <%s> must be at least 1" % INPUT_COEFF)
      return

    # if we are given a building weights field, check that it is valid
    if INPUT_BUILDING_WEIGHTS_FIELD == "#":
      INPUT_BUILDING_WEIGHTS_FIELD = ""
    if INPUT_BUILDING_WEIGHTS_FIELD and (INPUT_BUILDING_WEIGHTS_FIELD not in
        fields(INPUT_POINTS)):
      AddError("Building weights field <%s> is not a valid attribute in the "
          "input points <%s>" % (INPUT_BUILDING_WEIGHTS_FIELD, INPUT_POINTS))
      return

    # setup
    env.overwriteOutput = True

    # copy the input points into an output feature class
    AddMessage("Copying input points to output feature class ...")
    input_points_layer = Layer(INPUT_POINTS)
    output_feature_class = "%s.shp" % join(INPUT_OUTPUT_DIRECTORY,
        INPUT_OUTPUT_FEATURE_CLASS_NAME)
    CopyFeatures_management(in_features=input_points_layer,
        out_feature_class=output_feature_class)
    AddMessage("\tDone.")

    # construct network and points
    network, points, edge_to_points = construct_network_and_load_buildings(
        INPUT_POINTS, INPUT_NETWORK, INPUT_BUILDING_WEIGHTS_FIELD)

    # extract origin and destination ids
    origin_ids = flagged_points(INPUT_POINTS, INPUT_ORIGINS_FIELD)
    destination_ids = flagged_points(INPUT_POINTS, INPUT_DESTINATIONS_FIELD)
    if len(origin_ids) == 0 or len(destination_ids) == 0 or (
        len(origin_ids) == 1 and origin_ids == destination_ids):
      AddWarning("No OD pair found, no computation will be done.")

    # compute redundancy index statistics for each origin point
    AddMessage("Computing redundancy indices ...")
    redundancy_indices = {}
    # memoize: computing index from O to D is same as computing it from D to O
    memo = {}
    for origin_id in origin_ids:
      progress_bar = Progress_Bar(len(destination_ids), 1,
          "Computing index for O=%s ..." % origin_id)
      # statistics variables
      tot_redundancy_index = 0
      tot_squared_redundancy_index = 0
      min_redundancy_index = None
      max_redundancy_index = None
      all_unique_segments = set()
      # track the number of destinations for which a numeric redundancy index is
      #     successfully computed
      n = 0
      for destination_id in destination_ids:
        if origin_id != destination_id:
          memo_key = (min(origin_id, destination_id), max(origin_id,
              destination_id))
          if memo_key not in memo:
            memo[memo_key] = find_redundancy_index(network, points,
                edge_to_points, INPUT_COEFF, origin_id, destination_id,
                INPUT_SEARCH_RADIUS, bool(INPUT_BUILDING_WEIGHTS_FIELD))
          if memo[memo_key] is not None:
            n += 1
            redundancy_pair, unique_segments_pair = memo[memo_key]
            min_redundancy_index = (min(min_redundancy_index, redundancy_pair)
                if min_redundancy_index is not None else redundancy_pair)
            max_redundancy_index = (max(max_redundancy_index, redundancy_pair)
                if max_redundancy_index is not None else redundancy_pair)
            tot_redundancy_index += redundancy_pair
            tot_squared_redundancy_index += redundancy_pair * redundancy_pair
            all_unique_segments |= unique_segments_pair
        progress_bar.step()
      if n > 0:
        avg_redundancy_index = tot_redundancy_index / n
        avg_squared_redundancy_index = tot_squared_redundancy_index / n
      else:
        avg_redundancy_index = avg_squared_redundancy_index = 0
      # TODO(mikemeko): work on std computation with better accuracy
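      # population std via the identity Var(X) = E[X^2] - (E[X])^2; the
      # max(..., 0) guards against small negative values from float error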
      std = sqrt(max(avg_squared_redundancy_index - avg_redundancy_index *
          avg_redundancy_index, 0))
      if min_redundancy_index is None:
        min_redundancy_index = 0
      if max_redundancy_index is None:
        max_redundancy_index = 0
      redundancy_indices[origin_id] = (n, avg_redundancy_index, std,
          min_redundancy_index, max_redundancy_index, all_unique_segments)
    AddMessage("\tDone.")

    # write out redundancy statistics to output feature class
    # delete all points that are not origins from the output feature class
    AddMessage("Writing out results ...")
    int_fields = ["InputID", "Reach"]
    double_fields = ["AvgRedund", "StdRedund", "MinRedund", "MaxRedund"]
    for field in int_fields:
      AddField_management(in_table=output_feature_class, field_name=field,
          field_type="INTEGER")
    for field in double_fields:
      AddField_management(in_table=output_feature_class, field_name=field,
          field_type="DOUBLE")
    rows = UpdateCursor(output_feature_class,
        ["OID@"] + int_fields + double_fields)
    for row in rows:
      oid = row[0]
      if Describe(INPUT_POINTS).extension != "shp":
        # original ids start from 1, but shapefile ids start from 0, so add
        #     1 to shapefile id for correct matching
        oid += 1
      if oid in redundancy_indices:
        n, avg, std, m, M, all_unique_segments = redundancy_indices[oid]
        row[1:] = [oid, n, avg, std, m, M]
        rows.updateRow(row)
      else:
        rows.deleteRow()
    # create a layer of the output feature class, for symbology purposes
    output_layer = "%s.lyr" % join(INPUT_OUTPUT_DIRECTORY,
        INPUT_OUTPUT_FEATURE_CLASS_NAME)
    MakeFeatureLayer_management(in_features=output_feature_class,
        out_layer=INPUT_OUTPUT_FEATURE_CLASS_NAME)
    SaveToLayerFile_management(INPUT_OUTPUT_FEATURE_CLASS_NAME, output_layer,
        "ABSOLUTE")
    # add output feature layer to display after applying symbology
    ApplySymbologyFromLayer_management(output_layer, join(path[0],
        "Symbology_Layers", "sample_points_symbology.lyr"))
    add_layer_to_display(output_layer)
    # if there is only one origin, symbolize selected edges
    if _common_id(memo.keys()) and len(all_unique_segments) > 0:
      n, avg, std, m, M, all_unique_segments = redundancy_indices[origin_ids[0]]
      select_edges_from_network(INPUT_NETWORK, all_unique_segments,
          INPUT_OUTPUT_DIRECTORY, "%s_edges" % INPUT_OUTPUT_FEATURE_CLASS_NAME)
    AddMessage("\tDone.")
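
# A minimal sketch of how this script tool might be invoked from the command
# line; the argument order follows the argv indices above, and every path and
# field name here is hypothetical:
#   python redundancy_index.py streets.nd buildings.shp IsOrigin IsDest
#       Weight 1.2 5280 C:\output results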
#-------------------------------------------------------------------------------
# Name:        Data_Updater
# Purpose:      See tool metadata
#
# Author:      groemhildt
#
# Created:     17/03/2016
# Copyright:   (c) groemhildt 2016
#-------------------------------------------------------------------------------
from arcpy import Parameter, AddError
from arcpy.da import UpdateCursor
try:
    from arcpy.mp import ArcGISProject
except ImportError as e:
    AddError('This script requires ArcGIS Pro: {}'.format(e))


def update_layer(layer, field, value, new_value):
    if field and value:
        where_clause = "{} = '{}'".format(field, value)
    else:
        where_clause = None
    try:
        cursor = UpdateCursor(layer, field, where_clause)
    except TypeError:
        return "Error loading table {}".format(layer)

    try:
        for row in cursor:
            row[0] = new_value
            cursor.updateRow(row)
    finally:
        # release the cursor's lock on the data
        del cursor
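

# A minimal sketch of calling update_layer; the layer and field names are
# hypothetical:
#   error = update_layer('Parcels', 'STATUS', 'PENDING', 'APPROVED')
#   if error:
#       AddError(error)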
Example #24
def publish(map_name,
            server,
            service_name=None,
            folder=None,
            schema_locks=False,
            overwrite=False,
            feature_access=False,
            feature_capabilities='Map,Query,Data',
            instance_count=1,
            project='CURRENT',
            server_type='STANDALONE_SERVER',
            service_type='MAP_SERVICE'):
    if not service_name:
        service_name = map_name

    # get the map
    sddraft_filename = f'{service_name}.sddraft'
    sddraft_output_filename = join(output, sddraft_filename)
    if isinstance(project, str):
        pro = ArcGISProject(project)
    else:
        pro = project
    mp = pro.listMaps(map_name)

    if not mp:
        AddError(f'could not locate map {map_name} in project {project}')
        return
    mp = mp[0]

    # create service draft and export to draft file
    AddMessage('Creating service draft...')
    service_draft = CreateSharingDraft(server_type, service_type, service_name,
                                       mp)
    service_draft.targetServer = server

    # set folder if necessary
    if folder:
        service_draft.serverFolder = folder

    service_draft.exportToSDDraft(sddraft_output_filename)

    # xml schema modifications
    # open the xml for potential modifications
    doc = read_draft(sddraft_output_filename)

    # set instance count
    set_instance_count(doc, instance_count)

    # enable feature service?
    if feature_access:
        enable_feature_access(doc, feature_capabilities)

    if not schema_locks:
        disable_locking(doc)

        if overwrite:
            enable_overwrite(doc)

    # save the modified xml
    write_draft(sddraft_output_filename, doc)

    # stage service
    AddMessage('Staging service...')
    sd_filename = f"{service_name}.sd"
    sd_output_filename = join(output, sd_filename)
    StageService_server(sddraft_output_filename, sd_output_filename)

    # share to server
    AddMessage('Uploading to server...')
    UploadServiceDefinition_server(sd_output_filename, server)
    AddMessage(f'Successfully published service {service_name} to {server}')
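
# A minimal sketch of publishing a map as a feature service; the map name and
# server URL are hypothetical:
#   publish('Traffic', 'https://gis.example.com/arcgis', feature_access=True,
#           feature_capabilities='Map,Query', instance_count=2)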
def create_mask(in_raster, in_polygon, out_raster):
    from os import path
    from arcpy import env, EnvManager, ResetEnvironments, AddError
    from arcpy.ia import Con, IsNull
    from arcpy.management import Delete, CopyRaster, GetCount, Clip as ClipRaster, GetRasterProperties
    from arcpy.conversion import PolygonToRaster
    from arcpy.analysis import Clip
    env.overwriteOutput = True

    # Clip raster and apply geometries at bottom-left and top-right corners to ensure the raster covers the ortho tile extent
    polygon_clipped = path.join("in_memory", "polygon_clipped")
    Clip(in_polygon, raster_extent_polygon(in_raster), polygon_clipped)
    generate_squares(polygon_clipped, in_raster)

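    # generate_squares added two corner squares, so a clipped feature count of
    # exactly 2 presumably means no mask polygons survived the clip; any other
    # count indicates mask geometry is present.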
    def is_masked(in_polygon):
        count = int(GetCount(in_polygon)[0])
        if count == 2:
            return False, count
        return True, count

    _is_masked = is_masked(polygon_clipped)
    # Set the Environment Extent to the extent of the Ortho-Image as well as other settings to align.
    EnvManager(cellSize=in_raster,
               extent=image_extent(in_raster),
               snapRaster=in_raster)  # , mask=in_raster)
    file, extension = path.splitext(out_raster)
    # Convert the Modified polygon that now covers entire extent of Interest to Raster
    temp_raster = file + "Temp" + ".tif"
    PolygonToRaster(polygon_clipped, "OBJECTID", temp_raster, "CELL_CENTER",
                    "", in_raster)
    Delete(polygon_clipped)
    # Clip the Polygon Raster
    temp_clip_rast = file + "TempClipped" + ".tif"
    ClipRaster(temp_raster, image_extent_2(in_raster), temp_clip_rast,
               in_raster, "-1", "NONE", "MAINTAIN_EXTENT")
    if _is_masked[0]:
        if _is_masked[1] < 4:
            mask_raster = Con(temp_clip_rast, 255, 0, "VALUE = 0")
        else:
            # Deal with Masks covering the entire image
            mask_raster = Con(IsNull(temp_clip_rast), 0, 255, "Value = 0")
            # Deal with Masks covering a corner of image
            if int(
                    GetRasterProperties(mask_raster,
                                        "UNIQUEVALUECOUNT").getOutput(0)) < 2:
                Delete(mask_raster)
                mask_raster = Con(temp_clip_rast, 0, 255,
                                  "VALUE <= {0}".format(_is_masked[1] - 2))
    else:
        mask_raster = Con(temp_clip_rast, 255, 255, "VALUE = 0")
    temp_mask_raster = file + "TempMask" + ".tif"
    mask_raster.save(temp_mask_raster)

    ext = path.splitext(out_raster)[1]

    if "jpg" in ext.lower():
        # Convert the raster to .jpg format
        # Combine the band 3x for final output as RGB
        CopyRaster(temp_mask_raster, out_raster, '', None, '', "NONE",
                   "ColormapToRGB", "8_BIT_UNSIGNED", "NONE", "NONE", "JPEG",
                   "NONE", "CURRENT_SLICE", "NO_TRANSPOSE")
    if "tif" in ext.lower():
        # Convert the raster to .tif format
        # Combine the band 3x for final output as RGB
        CopyRaster(temp_mask_raster, out_raster, '', None, '', "NONE",
                   "ColormapToRGB", "8_BIT_UNSIGNED", "NONE", "NONE", "TIFF",
                   "NONE", "CURRENT_SLICE", "NO_TRANSPOSE")
    if ext.lower() not in [".tif", ".jpg"]:
        AddError(
            "Process Failed. Currently ony supports .jpg and .tif as output formats"
        )
    # Delete Intermediate Data
    Delete(temp_clip_rast)
    Delete(temp_mask_raster)
    Delete(temp_raster)
    # Reset geoprocessing environment settings
    ResetEnvironments()
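
# A minimal sketch of calling create_mask; all paths are hypothetical:
#   create_mask(r'C:\data\ortho_tile.tif', r'C:\data\mask_polygons.shp',
#               r'C:\output\mask_tile.tif')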
Example #26
def main():
    from arcpy import CheckExtension, CheckOutExtension, CheckInExtension, ExecuteError, GetMessages, AddError,\
        ListDatasets, env, SetProgressor, SetProgressorLabel, SetProgressorPosition, ResetProgressor, Exists
    from arcpy.management import CreateFileGDB, CreateMosaicDataset, AddRastersToMosaicDataset
    from arcpy import Describe
    from os.path import join, exists
    from os import mkdir, makedirs, path  # path is used below via path.dirname/path.exists

    class LicenseError(Exception):
        pass

    try:
        if CheckExtension("ImageAnalyst") == "Available":
            CheckOutExtension("ImageAnalyst")
        else:
            # raise a custom exception
            raise LicenseError
        try:
            from PIL import Image
        except ModuleNotFoundError:
            AddError(
                "PILLOW Library Not Detected. Install using Python Manager in ArcGIS Pro"
            )
            print(
                "PILLOW Library Not Detected. Install using Python Manager in ArcGIS Pro"
            )
            exit()

        env.workspace = in_mosaic_gdb
        mosaics = ListDatasets("*", "Mosaic")
        file_count = len(mosaics)
        count = 0
        SetProgressor("step", "Begin Processing Files...", 0, file_count, 1)
        if not exists(out_folder):
            makedirs(out_folder)
        fileGDB = join(out_folder, "ortho_mosaics.gdb")
        if not Exists(fileGDB):
            CreateFileGDB(out_folder, "ortho_mosaics.gdb")
        for mosaic in mosaics:
            print("processing mosaic {0} of {1}".format(count + 1, file_count))
            in_mosaic = join(in_mosaic_gdb, mosaic)
            i_list, extent = get_images_and_stats(
                in_mosaic
            )  # Obtain image statistics and info from mosaic for processing
            for i in i_list:  # Check that output folder is not the path of i
                if out_folder == path.dirname(i[0]):
                    AddError(
                        "outFolder cannot be the same folder/directory as images referenced in the mosaic dataset"
                    )
                    exit()
            if not path.exists(out_folder):
                makedirs(out_folder)
            out_tile_folder = join(out_folder, "tiles{}".format(count))
            mkdir(out_tile_folder)
            SetProgressorLabel("Texturing Mosaic {0}...".format(count))
            texture_images(i_list, extent, in_texture, in_polygon,
                           out_tile_folder, method,
                           blur_distance)  # Generate Texture-Masked tiles

            mosaic_name = "tiles{}_".format(count)
            mosaic_dataset = join(fileGDB, mosaic_name)
            SetProgressorLabel(
                "Creating Mosaic Dataset for Tiles of {0}...".format(mosaic))
            sr = Describe(in_mosaic).spatialReference
            CreateMosaicDataset(fileGDB, mosaic_name, sr, num_bands,
                                pixel_depth, product_definition,
                                product_band_definitions)
            SetProgressorLabel(
                "Adding of {0} to Mosaic Dataset...".format(mosaic))
            AddRastersToMosaicDataset(mosaic_dataset, "Raster Dataset",
                                      out_tile_folder, "UPDATE_CELL_SIZES",
                                      "UPDATE_BOUNDARY", "NO_OVERVIEWS", None,
                                      0, 1500, None, '', "SUBFOLDERS",
                                      "ALLOW_DUPLICATES", "NO_PYRAMIDS",
                                      "NO_STATISTICS", "NO_THUMBNAILS", '',
                                      "NO_FORCE_SPATIAL_REFERENCE",
                                      "NO_STATISTICS", None, "NO_PIXEL_CACHE")
            SetProgressorPosition()
            count += 1
        ResetProgressor()
        CheckInExtension("ImageAnalyst")
    except LicenseError:
        AddError("Image Analyst license is unavailable")
        print("Image Analyst license is unavailable")
    except ExecuteError:
        AddError(GetMessages(2))
        print(GetMessages(2))
from xml.dom.minidom import parse
from arcpy import GetInstallInfo, AddError
import sys
try:
    from arcpy.mapping import MapDocument, ListDataFrames, ListLayers
    from arcpy import ExportMetadata_conversion
except ImportError as e:
    AddError('This tool requires ArcMap: {}'.format(e))
    print('This tool requires ArcMap: {}'.format(e))


def update_metadata(document='CURRENT'):
    """
    updates metadata in an arcmap document
        document - (optional) the path to an arcmap document or the keyword 'CURRENT'.
            The default is 'CURRENT'
    """
    # set local variables
    install_dir = GetInstallInfo("desktop")["InstallDir"]
    translator = install_dir + "Metadata/Translator/ESRI_ISO2ISO19139.xml"
    mxd = MapDocument(document)
    df = ListDataFrames(mxd)
    temp_path = "C:/temp"
    for layer in ListLayers(mxd, "*", df[0]):
        if not layer.isGroupLayer:
            description_text = ""
            path = temp_path + '/' + layer.datasetName + '.xml'
            print(path)
            ExportMetadata_conversion(layer.dataSource, translator, path)
            dom = parse(path)
            fields = ('abstract', 'purpose', 'credit')
Example #28
def process():
    class LicenseError(Exception):
        pass

    try:
        if CheckExtension("ImageAnalyst") == "Available":
            CheckOutExtension("ImageAnalyst")
        else:
            # raise a custom exception
            raise LicenseError

        # System Parameters
        tile_name = "FileName"

        # Begin Script
        temp_fc = join("in_memory", "temp_fc")
        CopyFeatures(in_fc, temp_fc)
        for f in file_names:
            AddField(temp_fc, f, "TEXT")

        df = pd.read_excel(in_xlsx, index_col=0)

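        # Compare each feature's tile-name stem against the URL stems from the
        # spreadsheet column and write the matching URL into the new field.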
        def attribute_tile(in_feature_class,
                           in_tile_name,
                           in_df,
                           in_name,
                           xlsx_row_name=xlsx_row_name):
            with da.UpdateCursor(in_feature_class,
                                 [in_tile_name, in_name]) as cursor:
                for fc_r in cursor:
                    for df_i, df_r in in_df.iterrows():
                        url = df_r[xlsx_row_name]
                        n = Path(url).stem
                        t_name = fc_r[0]
                        t_n = Path(t_name).stem
                        if n.startswith(in_name) and t_n in n:
                            fc_r[1] = url
                    cursor.updateRow(fc_r)

        # Attribute the LiDAR Derivatives
        for n in file_names:
            attribute_tile(temp_fc, tile_name, df, n)

        def attribute_tile_lidar(in_feature_class,
                                 in_tile_name,
                                 in_df,
                                 in_name,
                                 xlsx_row_name=xlsx_row_name):
            with da.UpdateCursor(in_feature_class,
                                 [in_tile_name, in_name]) as cursor:
                for fc_r in cursor:
                    for df_i, df_r in in_df.iterrows():
                        url = df_r[xlsx_row_name]
                        n = split(url)[1]
                        t_name = fc_r[0]
                        if n == t_name:
                            fc_r[1] = url
                    cursor.updateRow(fc_r)

        # Attribute the LiDAR tile now
        AddField(temp_fc, in_lidar_format, "TEXT")
        attribute_tile_lidar(temp_fc,
                             tile_name,
                             df,
                             in_lidar_format,
                             xlsx_row_name=xlsx_row_name)
        '''
        # Print Fields for debugging/assessing results of above operations
        file_names.append(in_lidar_format)
        print(file_names)
        with da.SearchCursor(temp_fc, file_names) as cursor:
            for fc_r in cursor:
                print(fc_r)
        '''

        # Delete Pandas Dataframe from Memory
        del df

        # Copy in_memory temporary feature class to output location
        CopyFeatures(temp_fc, out_fc)

        # Delete Temporary Feature Class
        Delete(temp_fc)

        # Check back in Image Analyst license
        CheckInExtension("ImageAnalyst")
    except LicenseError:
        AddError("ImageAnalyst license is unavailable")
        print("ImageAnalyst license is unavailable")
    except ExecuteError:
        AddError(GetMessages(2))
        print(GetMessages(2))
Example #29
def ExportNUSYS(admin_workspace):
    print "exporting initialized at " + str(datetime.datetime.now())
    #set the output database, this could be changed to admin workspace
    destConnection = admin_workspace
    #copy the map extract table to the destination commented 5/8/2014
    #TableToTable_conversion("MAP_EXTRACT",destConnection,"Map_Extract","#","#","#")
    #start the loop for layers in the mxd
    for lyr in mapping.ListLayers(nusys_mxd):
        #continue the loop operations for layers in the NUSYS MXD (unlisted)
        try:
            #manipulate the layer name a little bit
            lyrname = lyr.name
            print(lyrname + " exporting...")
            outlyrname = lyrname
            outlyrobj = destConnection + "\\GIS_CANSYS.SHARED." + outlyrname
            #this should prevent ERROR 000258: Layer already exists, even though OverwriteOutput is true
            if Exists(outlyrobj):
                Delete_management(outlyrobj)
            #export the layer to SQL server
            FeatureClassToFeatureClass_conversion(lyr, destConnection,
                                                  outlyrname, "#", "#", "#")
            #this is a total replacement, so grant the necessary administrative privileges
            ChangePrivileges_management(outlyrobj, "readonly", "GRANT",
                                        "AS_IS")
            #tell me what happened
            print lyrname + " exported to " + outlyrname + " " + str(
                datetime.datetime.now())
        except ExecuteError:
            msgs = GetMessages(2)
            AddError(msgs)
            #tell me what went wrong if there was an execute error
            print(msgs)
            AcceptConnections(admin_workspace, True)
            endingTime = datetime.datetime.now()
            ScriptStatusLogging('CANP_LRS_EXPORT.py', 'ExportNUSYS Function',
                                scriptFailure, startingTime, endingTime,
                                GetMessages(2))

            pass
        except (RuntimeError, TypeError, NameError):
            #tell me if there is a problem with one of the layers
            print "TypeError on item" + lyr.name
            AcceptConnections(admin_workspace, True)
            endingTime = datetime.datetime.now()
            ScriptStatusLogging('CANP_LRS_EXPORT.py', 'ExportNUSYS Function',
                                scriptFailure, startingTime, endingTime,
                                GetMessages(2))

            pass
        except:
            tb = sys.exc_info()[2]
            tbinfo = traceback.format_tb(tb)[0]
            # Concatenate information together concerning the error into a message string

            pymsg = "PYTHON ERRORS:\nTraceback info:\n" + tbinfo + "\nError Info:\n" + str(
                sys.exc_info()[1])
            msgs = "ArcPy ERRORS:\n" + GetMessages(2) + "\n"
            print(pymsg + "\n")
            print(msgs)
            AcceptConnections(admin_workspace, True)
            endingTime = datetime.datetime.now()
            ScriptStatusLogging('CANP_LRS_EXPORT.py', 'ExportNUSYS Function',
                                scriptFailure, startingTime, endingTime,
                                GetMessages(2))