Example #1
def renameService(handler, service, newName):
    ''' Function to rename a service
    Requires Admin user/password, as well as server and port (necessary to construct token if one does not exist).
    service = String of existing service with type separated by a period <serviceName>.<serviceType>
    newName = String of new service name
    '''

    # Remove any status suffix appended to the service string
    service = service.replace("--[STOPPED]", "")
    service = service.replace("--[STARTED]", "")

    # Check the service name for a folder:
    if "/" in service:
        serviceName = service.split('.')[0].split("/")[1]
        folderName = service.split('.')[0].split("/")[0] + "/"
    else:
        serviceName = service.split('.')[0]
        folderName = ""

    renameService_dict = {
        "serviceName": serviceName,
        "serviceType": service.split('.')[1],
        "serviceNewName": newName
    }

    rename_url = "{}/services/{}/renameService".format(handler.baseURL,
                                                       folderName)
    status = handler.url_request(rename_url, renameService_dict, 'POST')

    if 'success' in status.values():
        arcpy.SetParameter(6, True)
    else:
        arcpy.SetParameter(6, False)
        arcpy.AddError(status)
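
# A hypothetical usage sketch for renameService() above (not part of the
# original example). It assumes `handler` is a small admin-REST wrapper that
# exposes `baseURL` and a `url_request(url, params, method)` method, as the
# code above implies, and that the service string uses the documented
# <folder>/<serviceName>.<serviceType> form:
#
#   renameService(handler, "Hydrology/FloodLevels.MapServer", "FloodLevels2020")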
Example #2
def createFolder(server, port, adminUser, adminPass, folderName, folderDescription, token=None):
    ''' Function to create a folder
    Requires Admin user/password, as well as server and port (necessary to construct token if one does not exist).
    folderName = String with a folder name
    folderDescription = String with a description for the folder
    If a token exists, you can pass one in for use.  
    '''    
    
    # Get and set the token
    if token is None:    
        token = gentoken(server, port, adminUser, adminPass)    
        
    folderProp_dict = { "folderName": folderName,
                        "description": folderDescription                                            
                      }
    
    folder_encode = urllib.urlencode(folderProp_dict)            
    create = "http://{}:{}/arcgis/admin/services/createFolder?token={}&f=json".format(server, port, token)    
    status = urllib2.urlopen(create, folder_encode).read()

    
    if 'success' in status:
        arcpy.SetParameter(6, True)
    else:
        arcpy.SetParameter(6, False)
        arcpy.AddError(status)
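
# Several examples on this page call a gentoken() helper that is not shown
# here. The sketch below is an assumption about what it looks like, based on
# the standard ArcGIS Server admin generateToken endpoint and the same
# Python 2 style as the other snippets (module-level imports of urllib,
# urllib2, json and arcpy are assumed).
def gentoken(server, port, adminUser, adminPass, expiration=60):
    ''' Request an admin token from http://<server>:<port>/arcgis/admin. '''
    query_dict = {"username": adminUser,
                  "password": adminPass,
                  "expiration": str(expiration),
                  "client": "requestip",
                  "f": "json"}
    query_string = urllib.urlencode(query_dict)
    token_url = "http://{}:{}/arcgis/admin/generateToken".format(server, port)
    token = json.loads(urllib2.urlopen(token_url, query_string).read())
    if "token" not in token:
        arcpy.AddError("Could not generate a token: {}".format(token))
        return None
    return token["token"]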
Example #3
def main(argv=None):
    success = True
    gzSupport.compressGDB(gzSupport.workspace)
    arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    tables = gzSupport.listDatasets(gzSupport.workspace)
    tNames = tables[0]
    tFullNames = tables[1]
    name = ''

    for dataset in datasets:
        arcpy.env.Workspace = gzSupport.workspace
        name = dataset.getAttributeNode("name").nodeValue
        table = gzSupport.getFullName(name, tNames, tFullNames)
        gzSupport.sourceIDField = dataset.getAttributeNode(
            "sourceIDField").nodeValue
        gzSupport.sourceNameField = dataset.getAttributeNode(
            "sourceNameField").nodeValue
        if not arcpy.Exists(table):
            gzSupport.addError("Feature Class " + table +
                               " does not exist, exiting")
            arcpy.SetParameter(SUCCESS, False)
            return
        if not arcpy.TestSchemaLock(table):
            gzSupport.addError("Unable to obtain a schema lock for " + table +
                               ", exiting")
            arcpy.SetParameter(SUCCESS, False)
            return -1
        desc = arcpy.Describe(table)
        fields = dataset.getElementsByTagName("Field")
        try:
            attrs = [f.name for f in arcpy.ListFields(table)]
            for field in fields:
                arcpy.env.Workspace = gzSupport.workspace
                targetName = gzSupport.getNodeValue(field, "TargetName")
                gzSupport.addGizintaField(table, targetName, field, attrs)

            retVal = setFieldValues(table, fields)
            if retVal == False:
                success = False
            gzSupport.logDatasetProcess(name, "Fields", retVal)
            arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
            gzSupport.cleanupGarbage()

        except:
            gzSupport.showTraceback()
            success = False
            gzSupport.logDatasetProcess("fieldCalculator", name, False)
        finally:
            arcpy.RefreshCatalog(table)
            arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    if success == False:
        gzSupport.addError(
            "Errors occurred during process, look in log file tools\\log\\fieldCalculator.log for more information"
        )
    if gzSupport.ignoreErrors == True:
        success = True
    arcpy.SetParameter(SUCCESS, success)
    arcpy.ResetProgressor()
    gzSupport.closeLog()
    return
Example #4
def function(params):

    try:
        pText = common.paramsAsText(params)

        # Get inputs
        runSystemChecks = common.strToBool(pText[1])
        outputFolder = pText[5]
        yearAFolder = pText[6]
        yearBFolder = pText[7]
        slopeOption = pText[8]
        slopeAngle = pText[9]
        yearARain = pText[10]
        yearBRain = pText[11]
        yearASupport = pText[12]
        yearBSupport = pText[13]

        # Set option for LS-factor
        if slopeOption == 'Calculate based on slope and length only':
            lsOption = 'SlopeLength'

        elif slopeOption == 'Include upslope contributing area':
            lsOption = 'UpslopeArea'

        else:
            log.error('Invalid LS-factor option')
            sys.exit()

        # System checks and setup
        if runSystemChecks:
            common.runSystemChecks()

        # Create output folder
        if not os.path.exists(outputFolder):
            os.mkdir(outputFolder)

        # Set up logging output to file
        log.setupLogging(outputFolder)

        # Call RUSLE_scen_acc function
        RUSLE_scen_acc.function(outputFolder, yearAFolder, yearBFolder,
                                lsOption, slopeAngle, yearARain, yearBRain,
                                yearASupport, yearBSupport)

        # Set up filenames for display purposes
        soilLossA = os.path.join(outputFolder, "soillossA")
        soilLossB = os.path.join(outputFolder, "soillossB")
        soilLossDiff = os.path.join(outputFolder, "soillossDiff")

        arcpy.SetParameter(2, soilLossA)
        arcpy.SetParameter(3, soilLossB)
        arcpy.SetParameter(4, soilLossDiff)

        log.info("RUSLE accounts operations completed successfully")

    except Exception:
        log.exception("RUSLE accounts tool failed")
        raise
Example #5
def main():
    ''' Call to tool method '''
    try:
        # get/set environment
        env.overwriteOutput = True
        #get min and max range for selected weapon
        cursorFields = [
            inputTypeNameField, inputTypeMinRangeField, inputTypeMaxRangeField
        ]
        with arcpy.da.SearchCursor(inputTable, cursorFields) as cursor:
            for row in cursor:
                if str(inputSelectedType) == str(row[0]):
                    inputMinimumRange = row[1]
                    inputMaximumRange = row[2]

        # Call tool method
        rr = RangeRingUtils.rangeRingsFromMinMax(
            inputCenterFeatures, inputMinimumRange, inputMaximumRange,
            "METERS", inputNumberOfRadials, outputRingFeatures,
            outputRadialFeatures, optionalSpatialReference)

        # Set output
        arcpy.SetParameter(4, rr[0])
        arcpy.SetParameter(5, rr[1])

    except arcpy.ExecuteError:
        # Get the tool error messages
        msgs = arcpy.GetMessages()
        arcpy.AddError(msgs)
        print(msgs)

    except:
        # Get the traceback object
        tb = sys.exc_info()[2]
        tbinfo = traceback.format_tb(tb)[0]

        # Concatenate information together concerning the error into a message string
        pymsg = "PYTHON ERRORS:\nTraceback info:\n" + tbinfo + "\nError Info:\n" + str(
            sys.exc_info()[1])
        msgs = "ArcPy ERRORS:\n" + arcpy.GetMessages() + "\n"

        # Return python error messages for use in script tool or Python Window
        arcpy.AddError(pymsg)
        arcpy.AddError(msgs)

        # Print Python error messages for use in Python / Python Window
        print(pymsg + "\n")
        print(msgs)

    finally:
        if len(deleteme) > 0:
            # cleanup intermediate datasets
            if debug == True:
                arcpy.AddMessage("Removing intermediate datasets...")
            for i in deleteme:
                if debug == True: arcpy.AddMessage("Removing: " + str(i))
                arcpy.Delete_management(i)
            if debug == True: arcpy.AddMessage("Done")
Example #6
def function(params):

    try:
        pText = common.paramsAsText(params)

        runSystemChecks = common.strToBool(pText[1])

        if params[2].name == 'Output_folder':
            outputFolder = pText[2]
        elif params[2].name == 'Land_extent_accounts':
            outputFolder = os.path.join(arcpy.env.scratchFolder, 'LCaccounts')
            LCaccounts = pText[2]

        lcOption = pText[3]
        inputLC = pText[4]
        openingLC = pText[5]
        closingLC = pText[6]
        openingField = pText[7]
        closingField = pText[8]
        lcTable = pText[9]
        lcCodeField = pText[10]
        lcNameField = pText[11]

        # System checks and setup
        if runSystemChecks:
            common.runSystemChecks()

        # Create output folder
        if not os.path.exists(outputFolder):
            os.mkdir(outputFolder)

        # Set up logging output to file
        log.setupLogging(outputFolder)

        # Call aggregation function
        lcOutputs = land_accounts.function(outputFolder, lcOption, inputLC,
                                           openingLC, closingLC, openingField,
                                           closingField, lcTable, lcCodeField,
                                           lcNameField)

        # Set up filenames for display purposes
        lcOpening = lcOutputs[0]
        lcClosing = lcOutputs[1]
        lcOpeningWithAccounts = lcOutputs[2]
        outCSV = lcOutputs[3]

        arcpy.SetParameter(12, lcOpening)
        arcpy.SetParameter(13, lcClosing)
        arcpy.SetParameter(14, outCSV)

        log.info("Land extent accounting operations completed successfully")

        return lcOpeningWithAccounts, lcClosing, outCSV

    except Exception:
        log.exception("Land extent accounting tool failed")
        raise
Example #7
    def execute(self, parameters, messages):
        class NoLayerFile(Exception):
            pass

        class NoRasterLayer(Exception):
            pass

        class NoOutput(Exception):
            pass

        try:
            """The source code of the tool."""
            input_source, no_flood_value, baseline_elevation_raster, baseline_elevation_value, outward_buffer, output_polygons = [
                p.valueAsText for p in parameters[:-2]
            ]

            # check if input exists
            if arcpy.Exists(input_source):
                full_path_source = common_lib.get_full_path_from_layer(
                    input_source)
            else:
                raise NoRasterLayer

            # check if input exists
            if arcpy.Exists(baseline_elevation_raster):
                full_path_baseline_raster = common_lib.get_full_path_from_layer(
                    baseline_elevation_raster)
            else:
                full_path_baseline_raster = None

            desc = arcpy.Describe(input_source)

            flood_polygons = create_3Dflood_level.flood_from_raster(
                input_source=full_path_source,
                input_type=desc.dataType,
                no_flood_value=no_flood_value,
                baseline_elevation_raster=full_path_baseline_raster,
                baseline_elevation_value=parameters[3].value,
                outward_buffer=parameters[4].value,
                output_polygons=output_polygons,
                debug=0)

            if flood_polygons:
                if common_lib.get_z_unit(flood_polygons, 0) == "Feet":
                    arcpy.SetParameter(6, flood_polygons)
                else:
                    arcpy.SetParameter(7, flood_polygons)
            else:
                raise NoOutput

        except NoRasterLayer:
            print("Can't find Raster layer. Exiting...")
            arcpy.AddError("Can't find Raster layer. Exiting...")

        except NoOutput:
            print("Can't create output. Exiting...")
            arcpy.AddError("Can't create output. Exiting...")
Example #8
def modifyLogs(server,
               port,
               adminUser,
               adminPass,
               clearLogs,
               logLevel,
               token=None):
    ''' Function to clear logs and modify log settings.
    Requires Admin user/password, as well as server and port (necessary to construct token if one does not exist).
    clearLogs = True|False
    logLevel = SEVERE|WARNING|INFO|FINE|VERBOSE|DEBUG
    If a token exists, you can pass one in for use.  
    '''

    # Get and set the token
    if token is None:
        token = gentoken(server, port, adminUser, adminPass)

    # Clear existing logs
    if clearLogs:
        clearLogs_url = "http://{}:{}/arcgis/admin/logs/clean?token={}&f=json".format(
            server, port, token)
        status = urllib2.urlopen(clearLogs_url, ' ').read()
        if 'success' in status:
            arcpy.AddMessage("Cleared log files")

    # Get the current logDir, maxErrorReportsCount and maxLogFileAge as we don't want to modify those
    currLogSettings_url = "http://{}:{}/arcgis/admin/logs/settings?f=pjson&token={}".format(
        server, port, token)
    logSettingProps = json.loads(
        urllib2.urlopen(currLogSettings_url, ' ').read())['settings']

    # Place the current settings, along with new log setting back into the payload
    logLevel_dict = {
        "logDir": logSettingProps['logDir'],
        "logLevel": logLevel,
        "maxErrorReportsCount": logSettingProps['maxErrorReportsCount'],
        "maxLogFileAge": logSettingProps['maxLogFileAge']
    }

    # Modify the logLevel
    log_encode = urllib.urlencode(logLevel_dict)
    logLevel_url = "http://{}:{}/arcgis/admin/logs/settings/edit?f=json&token={}".format(
        server, port, token)
    logStatus = json.loads(urllib.urlopen(logLevel_url, log_encode).read())

    if logStatus['status'] == 'success':
        arcpy.AddMessage(
            "Successfully changed log level to {}".format(logLevel))
        arcpy.SetParameter(6, True)
    else:
        arcpy.AddWarning("Log level not changed")
        arcpy.AddMessage(logStatus)
        arcpy.SetParameter(6, False)
Example #9
def main():
    try:
        tbx = os.path.join(getToolboxPath(), getToolboxName())
        if not (os.path.exists(tbx)):
            raise Exception("Cannot find toolbox: " + str(tbx))

        # Load required toolboxes
        arcpy.ImportToolbox(tbx, "pdc")

        # Set Geoprocessing environments
        arcpy.env.overwriteOutput = True

        # Process: Calculate Value
        uniqueID = getUniqueID(inputTrackLines, inputTrackIDFieldName)
        arcpy.AddMessage("Using unique ID = " + str(uniqueID))

        # Process: Import Enemy Sightings
        arcpy.AddMessage("Adding Enemy Sightnings...")
        arcpy.ImportEnemySightingsXML_pdc(inputPatrolReportXML, uniqueID,
                                          inputEnemySightingsTable)

        # Process: Import Patrol Rpt XML
        arcpy.AddMessage("Adding Patrol Report...")
        arcpy.ImportPatrolRptXML_pdc(inputPatrolReportXML, uniqueID,
                                     inputPatrolReportTable)

        arcpy.AddMessage("Done!")
        arcpy.SetParameter(5, inputEnemySightingsTable)
        arcpy.SetParameter(6, inputPatrolReportTable)

    except arcpy.ExecuteError:
        # Get the tool error messages
        msgs = arcpy.GetMessages()
        arcpy.AddError(msgs)

        # return a system error code
        sys.exit(-1)

    except Exception as e:
        # Get the traceback object
        tb = sys.exc_info()[2]
        tbinfo = traceback.format_tb(tb)[0]

        # Concatenate information together concerning the error into a message string
        pymsg = "PYTHON ERRORS:\nTraceback info:\n" + tbinfo + "\nError Info:\n" + str(
            sys.exc_info()[1])
        msgs = "ArcPy ERRORS:\n" + arcpy.GetMessages() + "\n"

        # Return python error messages for use in script tool or Python Window
        arcpy.AddError(pymsg)
        print(pymsg)
        arcpy.AddError(msgs)
        print(msgs)
Example #10
def main():
    try:
        # get/set environment
        env.overwriteOutput = True

        # Call tool method

        rr = RangeRingUtils.rangeRingsFromInterval(inputCenterFeatures,
                                                   inputNumberOfRings,
                                                   inputDistanceBetween,
                                                   inputDistanceUnits,
                                                   inputNumberOfRadials,
                                                   outputRingFeatures,
                                                   outputRadialFeatures,
                                                   optionalSpatialReference)
        # Set output
        arcpy.SetParameter(5, rr[0])
        arcpy.SetParameter(6, rr[1])

    except arcpy.ExecuteError:
        # Get the tool error messages
        msgs = arcpy.GetMessages()
        arcpy.AddError(msgs)
        print(msgs)

    except:
        # Get the traceback object
        tb = sys.exc_info()[2]
        tbinfo = traceback.format_tb(tb)[0]

        # Concatenate information together concerning the error into a message string
        pymsg = "PYTHON ERRORS:\nTraceback info:\n" + tbinfo + "\nError Info:\n" + str(sys.exc_info()[1])
        msgs = "ArcPy ERRORS:\n" + arcpy.GetMessages() + "\n"

        # Return python error messages for use in script tool or Python Window
        arcpy.AddError(pymsg)
        arcpy.AddError(msgs)

        # Print Python error messages for use in Python / Python Window
        print(pymsg + "\n")
        print(msgs)

    finally:
        if len(deleteme) > 0:
            # cleanup intermediate datasets
            if debug == True: arcpy.AddMessage("Removing intermediate datasets...")
            for i in deleteme:
                if debug == True: arcpy.AddMessage("Removing: " + str(i))
                arcpy.Delete_management(i)
            if debug == True: arcpy.AddMessage("Done")
Example #11
def main():
    try:
        # get/set environment
        env.overwriteOutput = True

        VisibilityUtilities.linearLineOfSight(
            inputObserverFeatures, inputObserverHeight, inputTargetFeatures,
            inputTargetHeight, inputSurface, outputLineOfSight,
            outputSightLines, outputObservers, outputTargets,
            inputObstructionFeatures)

        # Set output
        arcpy.SetParameter(5, outputLineOfSight)
        arcpy.SetParameter(6, outputSightLines)
        arcpy.SetParameter(7, outputObservers)
        arcpy.SetParameter(8, outputTargets)

    except arcpy.ExecuteError:
        # Get the tool error messages
        msgs = arcpy.GetMessages()
        arcpy.AddError(msgs)
        print(msgs)

    except:
        # Get the traceback object
        tb = sys.exc_info()[2]
        tbinfo = traceback.format_tb(tb)[0]

        # Concatenate information together concerning the error into a message string
        pymsg = "PYTHON ERRORS:\nTraceback info:\n" + tbinfo + "\nError Info:\n" + str(
            sys.exc_info()[1])
        msgs = "ArcPy ERRORS:\n" + arcpy.GetMessages() + "\n"

        # Return python error messages for use in script tool or Python Window
        arcpy.AddError(pymsg)
        arcpy.AddError(msgs)

        # Print Python error messages for use in Python / Python Window
        print(pymsg + "\n")
        print(msgs)

    finally:
        if len(deleteme) > 0:
            # cleanup intermediate datasets
            if debug == True:
                arcpy.AddMessage("Removing intermediate datasets...")
            for i in deleteme:
                if debug == True: arcpy.AddMessage("Removing: " + str(i))
                arcpy.Delete_management(i)
            if debug == True: arcpy.AddMessage("Done")
Example #12
def function(params):

    try:
        pText = common.paramsAsText(params)

        # Get inputs
        runSystemChecks = common.strToBool(pText[1])
        outputFolder = pText[2]
        inputRaster = pText[5]
        aggregationZones = pText[6]
        aggregationColumn = pText[7]

        rerun = False

        # Create output folder
        if not os.path.exists(outputFolder):
            os.mkdir(outputFolder)

        # System checks and setup
        if runSystemChecks:
            common.runSystemChecks(outputFolder, rerun)

        # Set up logging output to file
        log.setupLogging(outputFolder)

        # Set up progress log file
        progress.initProgress(outputFolder, rerun)

        # Write input params to XML
        common.writeParamsToXML(params, outputFolder)

        # Call zonal statistics function
        CalcZonal.function(outputFolder, inputRaster, aggregationZones,
                           aggregationColumn)

        # Set up filenames for display purposes
        outRaster = os.path.join(outputFolder, 'statRaster')
        outTable = os.path.join(outputFolder, 'statTable.dbf')

        # Set up outputs
        arcpy.SetParameter(3, outRaster)
        arcpy.SetParameter(4, outTable)

        log.info("Zonal statistics operations completed successfully")

    except Exception:
        log.exception("Zonal statistics tool failed")
        raise
Example #13
def CleanFlowline(output_workspace, stream_network, smooth_tolerance):
    # Set environment variables
    arcpy.env.overwriteOutput = True
    arcpy.env.workspace = output_workspace

    # List parameter values
    arcpy.AddMessage("Workspace: {}".format(arcpy.env.workspace))
    arcpy.AddMessage("Stream Network: "
                     "{}".format(arcpy.Describe(stream_network).baseName))

    # Dissolve by `ReachName` field
    stream_network_dissolve = os.path.join(output_workspace,
                                           "stream_network_dissolve")
    arcpy.Dissolve_management(in_features=stream_network,
                              out_feature_class=stream_network_dissolve,
                              dissolve_field=["ReachName"],
                              unsplit_lines="DISSOLVE_LINES")

    arcpy.AddMessage("Stream Network Dissolved")

    # Smooth the stream network
    flowline = os.path.join(output_workspace, "flowline")
    arcpy.SmoothLine_cartography(in_features=stream_network_dissolve,
                                 out_feature_class=flowline,
                                 algorithm="PAEK",
                                 tolerance=smooth_tolerance)

    arcpy.AddMessage("Stream Network Smoothed")

    # Return
    arcpy.SetParameter(3, flowline)

    # Cleanup
    arcpy.Delete_management(in_data=stream_network_dissolve)
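
# A hypothetical script-tool wrapper for CleanFlowline() above (an assumption,
# not part of the original example): parameters 0-2 feed the function and
# parameter 3 receives the smoothed flowline via arcpy.SetParameter(3, ...).
if __name__ == "__main__":
    CleanFlowline(output_workspace=arcpy.GetParameterAsText(0),
                  stream_network=arcpy.GetParameterAsText(1),
                  smooth_tolerance=arcpy.GetParameterAsText(2))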
Example #14
def create_zip():
    try:
        ### Create output maps zipped file ###
        arcpy.AddMessage("Creating output zipped folder with all output...")
        files = [
            os.path.join(arcpy.env.scratchFolder, file)
            for file in os.listdir(arcpy.env.scratchFolder)
            if (os.path.isfile(os.path.join(arcpy.env.scratchFolder, file))
                and not file.endswith((".csv", ".xml")))
        ]
        # Create a zipfile object and specify 'w' for 'write' mode
        myzip = zipfile.ZipFile(
            os.path.join(arcpy.env.scratchFolder, 'output_maps.zip'), 'w')
        # Loop through the file list and add each file to the zip file
        for zip_file in files:
            myzip.write(zip_file,
                        os.path.basename(zip_file),
                        compress_type=zipfile.ZIP_DEFLATED)
        myzip.close()

    except Exception:
        e = sys.exc_info()[1]
        arcpy.AddError('An error occurred: {}'.format(e.args[0]))

    arcpy.SetParameter(
        10, os.path.join(arcpy.env.scratchFolder, 'output_maps.zip'))
Example #15
def arcpyPost(pnts, startPnt):
    lyr = arcpy.GetParameterAsText(2)
    ids = [str(p.eid - 1) for p in pnts]
    whereclause = '"FID" in ({0})'.format(",".join(ids))
    lookup = {}
    for p in pnts:
        if lookup.get(p.eid - 1) is not None:
            lookup[p.eid - 1].append(p)
        else:
            lookup[p.eid - 1] = [p]
    arcpy.MakeFeatureLayer_management(FlowlineShp, FlowlineLyr1, whereclause)
    # If FIDs have duplicates, the shapefile-based query removes the duplicates and returns each FID only once
    with arcpy.da.SearchCursor(FlowlineLyr1, ["FID", "SHAPE@"]) as cursor:
        for row in cursor:
            pntl = lookup[row[0]]
            for pnt in pntl:
                if pnt.percent == 1:
                    pnt.pointGeom = arcpy.PointGeometry(row[1].lastPoint)
                else:
                    pnt.pointGeom = row[1].positionAlongLine(pnt.percent, True)

    arcpy.CreateFeatureclass_management("in_memory", os.path.basename(lyr),
                                        "POINT", None, "DISABLED", "DISABLED",
                                        spatial_reference)
    arcpy.AddField_management(lyr, "cost", "DOUBLE")
    with arcpy.da.InsertCursor(lyr, ['SHAPE@', 'cost']) as cursor:
        for p in pnts:
            cursor.insertRow([p.pointGeom, p.cat])
        cursor.insertRow([startPnt, 0])
    arcpy.SetParameter(2, lyr)
    arcpy.Delete_management(FlowlineLyr)
Example #16
def main(argv=None):
    xmlDoc = dla.getXmlDoc(xmlFileName)
    targetName = dla.getTargetName(xmlDoc)
    success = calculate(xmlFileName, dla.workspace, targetName, False)
    if success == False:
        dla.addError("Errors occurred during field calculation")
    arcpy.SetParameter(SUCCESS, success)
Example #17
            def do_manage(tool, parameters, messages):
                cls.logger.info('Running ' + cls.pckg_name__ + ' version ' +
                                cls.version__ + ' released on ' +
                                str(cls.release_date__))
                vc = cls.check_version()
                if vc:
                    cls.logger.warning(
                        'You are running an old toolbox version. A newer one ({0}) is available for download at {1}'
                        .format('.'.join(vc), cls.toolbox_repository__))
                cls.logger.info('__Starting ' + tool.__class__.__name__ + '__')

                output = func(tool, parameters, messages)
                if output and parameters[
                        -1].parameterType == 'Derived' and not parameters[
                            -1].value:
                    arcpy.SetParameter(len(parameters) - 1, output)

                if output_store_key is not None and output:
                    if isinstance(output_store_key, str):
                        out_keys = [output_store_key]
                    else:
                        out_keys = output_store_key  #BUG if overwriting the same variable it complains about being undefined (probably gets lost in this function scope?!)
                    if not isinstance(output, (list, tuple)):
                        output = [output]
                    if isinstance(out_keys, list):
                        if len(out_keys) != len(output):
                            raise RuntimeError(
                                "Could not manage output. Expected {0} output but received {1}"
                                .format(len(out_keys), len(output)))
                        else:
                            for o, k in zip(output, out_keys):
                                cls.manage_output(k, o)
                return
Example #18
def main(argv=None):
    # main function - list the datasets and delete rows
    success = True
    try:
        names = gzSupport.listDatasets(sourceGDB)
        tNames = names[0]
        tFullNames = names[1]
        arcpy.SetProgressor("Step", "Deleting rows...", 0, len(tFullNames), 1)
        i = 0
        for name in tFullNames:
            arcpy.SetProgressorPosition(i)
            arcpy.SetProgressorLabel(" Deleting rows in " + name + "...")
            # for each full name
            if len(datasetNames) == 0 or tNames[i].upper() in datasetNames:
                retVal = doTruncate(name)
                gzSupport.logDatasetProcess(name, "deleteRowsGDB", retVal)
                if retVal == False:
                    success = False
            else:
                gzSupport.addMessage("Skipping " + tNames[i])
            i += 1
    except:
        gzSupport.showTraceback()
        gzSupport.addError("Failed to delete rows")
        success = False
        gzSupport.logDatasetProcess(name, "deleteRowsGDB", success)
    finally:
        arcpy.SetParameter(SUCCESS, success)
        arcpy.ResetProgressor()
        gzSupport.closeLog()
        arcpy.ClearWorkspaceCache_management(sourceGDB)
Example #19
    def __build_pdf(self):
        try:
            arcpy.AddMessage("SimpleDocTemplate")
            uniqueID = str(uuid.uuid1())
            path_report = os.path.join(arcpy.env.scratchWorkspace, 'report_{0}.pdf'.format(uniqueID))
            path_report_merged = os.path.join(arcpy.env.scratchWorkspace, 'report_{0}_merged.pdf'.format(uniqueID))

            arcpy.AddMessage(path_report)
            doc = SimpleDocTemplate(path_report, pagesize=A4, showBoundary=0,
                                    leftMargin=0, rightMargin=0, topMargin=95,
                                    bottomMargin=45, allowSplitting=1)

            doc.build(self.report_elements)
            arcpy.AddMessage("INIT MERGE")
          
            merger = PdfFileMerger()
            merger.append(path_report)
            merger.append(self.web_map_as_pdf)

            arcpy.AddMessage("WRITE MERGE")
            
            merger.write(path_report_merged)       
            merger.close()
            arcpy.AddMessage("CLOSE MERGE")
            self.__set_page_number(path_report_merged)

            arcpy.AddMessage("build completed!")
            arcpy.SetParameter(4, path_report_merged)

        except Exception as ex:
            print(ex)
            arcpy.AddMessage(str(ex))
Example #20
def preview(xmlFileName):
    global source, target, rowLimit

    dla.setWorkspace()
    dla._errCount = 0

    xmlFileName = dla.getXmlDocName(xmlFileName)
    xmlDoc = dla.getXmlDoc(xmlFileName)
    #arcpy.AddMessage("rowLimit = " + str(rowLimit) )
    if rowLimit == "" or rowLimit == None:
        rowLimit = 100

    prj = dla.setProject(xmlFileName, dla.getNodeValue(xmlDoc, "Project"))
    if prj == None:
        dla.addError(
            "Unable to open your project, please ensure it is in the same folder as your current project or your Config file"
        )
        return False

    if source == "" or source == None:
        source = dla.getDatasetPath(xmlDoc, "Source")
    if target == "" or target == None:
        target = dla.getDatasetPath(xmlDoc, "Target")

    if dla.isTable(source) or dla.isTable(target):
        datasetType = 'Table'
    else:
        datasetType = 'FeatureClass'
    dte = datetime.datetime.now().strftime("%Y%m%d%H%M")
    targetName = dla.getDatasetName(target) + dte
    targetDS = os.path.join(dla.workspace, targetName)
    res = dlaExtractLayerToGDB.extract(xmlFileName, rowLimit, dla.workspace,
                                       source, targetDS, datasetType)
    if res == True:
        res = dlaFieldCalculator.calculate(xmlFileName, dla.workspace,
                                           targetName, False)

        if res == True:
            arcpy.env.addOutputsToMap = True
            layer = targetName
            layertmp = targetName + "tmp"
            if arcpy.Exists(layertmp):
                arcpy.Delete_management(layertmp)
            if dla.isTable(targetDS):
                arcpy.MakeTableView_management(targetDS, layertmp)
            else:
                arcpy.MakeFeatureLayer_management(targetDS, layertmp)
            fieldInfo = dla.getLayerVisibility(layertmp, xmlFileName)
            if dla.isTable(targetDS):
                arcpy.MakeTableView_management(targetDS, layer, None,
                                               dla.workspace, fieldInfo)
            else:
                arcpy.MakeFeatureLayer_management(targetDS, layer, None,
                                                  dla.workspace, fieldInfo)
            # should make only the target fields visible
            arcpy.SetParameter(_success, layer)
    else:
        dla.addError("Failed to Extract data")
        print("Failed to Extract data")
    dla.writeFinalMessage("Data Assistant - Preview")
Example #21
def main(argv=None):
    # main function - list the datasets and delete rows
    success = True
    name = ''
    gzSupport.workspace = sourceGDB
    try:
        if len(datasetNames) == 0:
            names = gzSupport.listDatasets(sourceGDB)
            tNames = names[0]
        else:
            tNames = datasetNames
        arcpy.SetProgressor("Step","Deleting rows...",0,len(tNames),1)
        i = 0
        for name in tNames:
            arcpy.SetProgressorPosition(i)
            arcpy.SetProgressorLabel(" Deleting rows in " + name + "...")
            # for each full name
            if len(datasetNames) == 0 or gzSupport.nameTrimmer(name.upper()) in datasetNames:
                retVal = doTruncate(os.path.join(sourceGDB, name))
                gzSupport.logDatasetProcess("deleteRowsGDB", name, retVal)
                if retVal == False:
                    success = False
            else:
                gzSupport.addMessage("Skipping "  + gzSupport.nameTrimmer(name))
            i = i + i
    except:
        gzSupport.showTraceback()
        gzSupport.addError("Failed to delete rows")
        success = False
        gzSupport.logDatasetProcess("deleteRowsGDB",name,success)
    finally:
        arcpy.SetParameter(SUCCESS, success)
        arcpy.ResetProgressor()
        gzSupport.closeLog()
        arcpy.ClearWorkspaceCache_management(sourceGDB)
Example #22
def main():
    in_add_features_params = arcpy.GetParameter(0)  # in add features params
    in_add_features_params = in_add_features_params.replace('\\"', '"')
    try:
        in_add_features_params = json.loads(in_add_features_params)
    except:
        arcpy.SetParameter(2, 'Invalid addFeaturesParams JSON, try again')
        exit()

    in_service_url = arcpy.GetParameter(1)

    token = get_portal_token()
    add_features_resp = add_features(in_service_url, in_add_features_params,
                                     token)

    arcpy.SetParameter(2, json.dumps(add_features_resp))
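
# The example above relies on get_portal_token() and add_features() helpers
# that are not shown here. The sketch below is an assumption about
# add_features(), based on the standard ArcGIS REST addFeatures operation of a
# feature layer; it presumes in_service_url points at a layer endpoint, that
# the parsed parameter is the "features" array, and that urllib, urllib2 and
# json are imported at module level as in the other snippets.
def add_features(service_url, features, token):
    ''' POST features to <layer URL>/addFeatures and return the parsed reply. '''
    post_data = urllib.urlencode({"features": json.dumps(features),
                                  "token": token,
                                  "f": "json"})
    response = urllib2.urlopen("{}/addFeatures".format(service_url), post_data)
    return json.loads(response.read())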
Example #23
def main(argv=None):
    success = True
    name = ''
    try:
        if not arcpy.Exists(gzSupport.workspace):
            gzSupport.addMessage(gzSupport.workspace +
                                 " does not exist, attempting to create")
            gzSupport.createGizintaGeodatabase()
        else:
            gzSupport.compressGDB(gzSupport.workspace)
        if len(datasets) > 0:
            progBar = len(datasets) + 1
            arcpy.SetProgressor("step", "Importing Layers...", 0, progBar, 1)
            arcpy.SetProgressorPosition()
        for dataset in datasets:
            gzSupport.sourceIDField = dataset.getAttributeNode(
                "sourceIDField").nodeValue
            sourceName = dataset.getAttributeNode("sourceName").nodeValue
            targetName = dataset.getAttributeNode("targetName").nodeValue
            arcpy.SetProgressorLabel("Loading " + sourceName + " to " +
                                     targetName + "...")
            if not arcpy.Exists(sourceLayer):
                gzSupport.addError("Layer " + sourceLayer +
                                   " does not exist, exiting")
                return
            target = os.path.join(gzSupport.workspace, targetName)
            arcpy.env.Workspace = gzSupport.workspace
            if not arcpy.Exists(target):
                gzSupport.addMessage("Feature Class " + target +
                                     " does not exist")
            else:
                arcpy.Delete_management(target)
            try:
                retVal = exportDataset(sourceLayer, targetName, dataset)
                if retVal == False:
                    success = False
            except:
                gzSupport.showTraceback()
                success = False
                retVal = False
            gzSupport.logDatasetProcess(sourceName, targetName, retVal)
        arcpy.SetProgressorPosition()
    except:
        gzSupport.addError("A Fatal Error occurred")
        gzSupport.showTraceback()
        success = False
        gzSupport.logDatasetProcess("extractLayerToGDB", name, False)
    finally:
        arcpy.ResetProgressor()
        arcpy.RefreshCatalog(gzSupport.workspace)
        arcpy.ClearWorkspaceCache_management(gzSupport.workspace)

    if success == False:
        gzSupport.addError(
            "Errors occurred during process, look in log files for more information"
        )
    if gzSupport.ignoreErrors == True:
        success = True
    gzSupport.closeLog()
    arcpy.SetParameter(SUCCESS, success)
Example #24
def main():
    """ Main Function """
    #   Input Parameters
    layers_to_clip = arcpy.GetParameterAsText(0)
    area_of_interest = arcpy.GetParameter(1)

    #   Check if valid AOI is provided. It should have at least 1 polygon
    #   feature
    aoi_featset = arcpy.FeatureSet()
    aoi_featset.load(area_of_interest)
    aoi_feat_count = int(arcpy.GetCount_management(aoi_featset)[0])

    if aoi_feat_count == 0:
        arcpy.AddError("Provided AOI has no polygon features." +
                       " Please provide valid AOI for analysis.")
        return

    #   Download data for provided layers
    layers_success = loop_req_layers(layers_to_clip, area_of_interest)
    if not layers_success:
        return

    #   Create zip file of generated excel files for output
    output_zip_file = create_zip_folder()
    if not output_zip_file:
        return
    else:
        # Set generated zip file as output
        arcpy.AddMessage("Zip file created at : " + str(output_zip_file))
        arcpy.SetParameter(2, output_zip_file)
Example #25
def main():
    ''' main '''
    try:
        arcpy.AddMessage("Getting database qualifier string ...")
        qualifierString = GetQualifierName(inputTDSFeatureDataset)
        if DEBUG is True: arcpy.AddMessage("qualifier string: " + qualifierString)

        fqClassesToMerge = [str(qualifierString + i) for i in FEATURECLASSES_TO_MERGE]
        if DEBUG is True: arcpy.AddMessage("fqClassesToMerge: " + str(fqClassesToMerge))

        workspace = os.path.dirname(inputTDSFeatureDataset)
        tdsFeatureClasses = FeatureClassesFromWorkspace(inputTDSFeatureDataset)
        if DEBUG is True: arcpy.AddMessage("tdsFeatureClasses: " + str(tdsFeatureClasses))

        # now go through the list of all of them and see which names
        # match our target list, if so, add them to a new list
        arcpy.AddMessage("Building list of input features ...")
        newList = [str(os.path.join(workspace, os.path.basename(inputTDSFeatureDataset), fc))\
                   for fc in tdsFeatureClasses if fc in fqClassesToMerge]
        if DEBUG is True: arcpy.AddMessage("newList: " + str(newList))

        # output feature class name
        target = os.path.join(inputMAOTWorkspace, "HLZPolyObstacleOutput")
        if DEBUG is True: arcpy.AddMessage("target: " + str(target))

        # merge all FCs into the target FC
        arcpy.AddMessage("Merging features to output (this may take some time)...")
        arcpy.Merge_management(newList, target)

        # set output
        arcpy.AddMessage("Setting output ...")
        arcpy.SetParameter(2, target)

    except arcpy.ExecuteError:
        # Get the tool error messages
        msgs = arcpy.GetMessages()
        arcpy.AddError(msgs)
        # print msgs #UPDATE
        print(msgs)

    except:
        # Get the traceback object
        tb = sys.exc_info()[2]
        tbinfo = traceback.format_tb(tb)[0]

        # Concatenate information together concerning the error into a message string
        pymsg = "PYTHON ERRORS:\nTraceback info:\n" + tbinfo + "\nError Info:\n" + str(sys.exc_info()[1])
        msgs = "ArcPy ERRORS:\n" + arcpy.GetMessages() + "\n"

        # Return python error messages for use in script tool or Python Window
        arcpy.AddError(pymsg)
        arcpy.AddError(msgs)

        # Print Python error messages for use in Python / Python Window
        # print pymsg + "\n" #UPDATE
        print(pymsg + "\n")
        # print msgs #UPDATE
        print(msgs)
Example #26
def build_feature_class(ships):
    arcpy.env.overwriteOutput = True
    arcpy.env.workspace = wk = arcpy.env.scratchGDB

    sr = arcpy.SpatialReference(4326)

    # if arcpy.Exists(fcname) is False:   # changed - creates new FC each time and overwrites old
    arcpy.CreateFeatureclass_management(wk,
                                        fcname,
                                        "POINT",
                                        spatial_reference=sr)  # NOQA

    arcpy.management.AddFields(
        fcname,
        [
            ["ID", "TEXT", "ID", 50],  # NOQA
            ["NAME", 'TEXT', "NAME", 50],  # NOQA
            ["MMSI", 'TEXT', "MMSI", 20],  # NOQA
            ['SHIP_TYPE', 'TEXT', 'SHIP_TYPE', 50],
            ["SHIP_CLASS", 'TEXT', "SHIP_CLASS", 50],
            ["FLAG", 'TEXT', "FLAG", 10],
            ["UPDATED_AT", 'TEXT', "UPDATED_AT", 100],  # NOQA
            ["GEN_CLASS", 'TEXT', "GEN_CLASS", 50],
            ["IND_CLASS", 'TEXT', "IND_CLASS", 50],
            ["COURSE", "FLOAT", "COURSE", 10]
        ])

    entslist = [
        "ID", "NAME", "MMSI", "SHIP_TYPE", "SHIP_CLASS", "FLAG", "UPDATED_AT",
        "GEN_CLASS", "IND_CLASS", "COURSE", "SHAPE@XY"
    ]  # noqa: E501

    iCur = arcpy.da.InsertCursor(fcname, entslist)

    for ship in ships:
        adds = []
        adds.append(ship.id)
        adds.append(ship.name)
        adds.append(ship.mmsi)
        adds.append(ship.ship_type)
        adds.append(ship.ship_class)
        adds.append(ship.flag)
        adds.append(ship.updated_at)
        adds.append(ship.gen_class)
        adds.append(ship.ind_class)
        course = ship.las_k_pos['course']
        adds.append(course)
        lx = ship.las_k_pos['geometry']['coordinates'][1]
        ly = ship.las_k_pos['geometry']['coordinates'][0]
        adds.append((ly, lx))
        iCur.insertRow(adds)

    fs = arcpy.FeatureSet()
    fs.load(os.path.join(wk, fcname))
    arcpy.SetParameter(1, fs)

    del iCur
Example #27
def renameService(server,
                  port,
                  adminUser,
                  adminPass,
                  service,
                  newName,
                  token=None):
    ''' Function to rename a service
    Requires Admin user/password, as well as server and port (necessary to construct token if one does not exist).
    service = String of existing service with type separated by a period <serviceName>.<serviceType>
    newName = String of new service name
    If a token exists, you can pass one in for use.  
    '''

    # Get and set the token
    if token is None:
        token = gentoken(server, port, adminUser, adminPass)

    service = urllib.quote(service.encode('utf8'))

    # Check the service name for a folder:
    if "//" in service:
        serviceName = service.split('.')[0].split("//")[1]
        folderName = service.split('.')[0].split("//")[0] + "/"
    else:
        serviceName = service.split('.')[0]
        folderName = ""

    renameService_dict = {
        "serviceName": serviceName,
        "serviceType": service.split('.')[1],
        "serviceNewName": urllib.quote(newName.encode('utf8'))
    }

    rename_encode = urllib.urlencode(renameService_dict)
    rename = "http://{}:{}/arcgis/admin/services/{}renameService?token={}&f=json".format(
        server, port, folderName, token)
    status = urllib2.urlopen(rename, rename_encode).read()

    if 'success' in status:
        arcpy.SetParameter(6, True)
    else:
        arcpy.SetParameter(6, False)
        arcpy.AddError(status)
Example #28
def createGzFile(sourceDataset, targetDataset, xmlFileName):

    success = False
    xmlStrSource = writeDocument(sourceDataset, targetDataset, xmlFileName)
    if xmlStrSource != "":
        success = True
    arcpy.SetParameter(gzSupport.successParameterNumber, success)
    arcpy.ResetProgressor()

    return True
Example #29
def main(argv=None):
    success = False
    xmlStrSource = writeDocument(sourceDataset, targetDataset, xmlFileName)
    if xmlStrSource != "":
        success = True

    arcpy.SetParameter(gzSupport.successParameterNumber, success)
    arcpy.ResetProgressor()
    gzSupport.closeLog()
    return
Example #30
    def execute(self, parameters, messages):
        """The source code of the tool."""
        self.run_tool(parameters)
        #arcpy.AddMessage(self.recordset)
        #print self.recordset
        arcpy.SetParameter(1, self.recordset)
        '''
            It seems that the SetParameterAsText method generates an arcpy.Result object.
            I would like to instead return a JSON string...
        '''
        return