def calcValue(row, names, calcString):
    # calculate a value based on source fields and/or other expressions
    outVal = ""
    calcList = calcString.split("|")
    for strVal in calcList:
        if strVal in names:
            try:
                fidx = names.index(strVal)
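                # non-string values are appended via str(); actual strings are wrapped in quotes so eval() sees a string literal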
                if str(row[fidx]) != row[fidx]:
                    outVal += str(row[fidx])
                else:
                    outVal += '"' + str(row[fidx]) + '"'

            except:
                outVal += strVal
        else:
            outVal += strVal
    if len(calcList) == 1 and outVal == '':
        outVal = calcList[0]
    try:
        if (outVal != "" and outVal != None):
            outVal = eval(outVal)
    except:
        dla.addMessage("Error evaluating:" + outVal)
        dla.showTraceback()
        dla.addError("Error calculating field values:" + outVal)
        outVal = None
    return outVal
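A minimal usage sketch for calcValue (field names and row values are hypothetical): tokens of the pipe-delimited calcString that match a field name are replaced with that row's value, everything else is kept verbatim, and the assembled string is handed to eval().

names = ["PARCEL_ID", "AREA_SQFT"]
row = (1042, 5280.0)

# "AREA_SQFT" is substituted with 5280.0, " * 0.0929" is kept as-is -> eval("5280.0 * 0.0929")
print(calcValue(row, names, "AREA_SQFT| * 0.0929"))  # ~490.5 (sq ft to sq m)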
Example #2
def calcValue(row,names,calcString):
    # calculate a value based on source fields and/or other expressions
    outVal = ""
    calcList = calcString.split("|")
    for strVal in calcList:
        if strVal in names:
            try:
                fidx = names.index(strVal)
                if str(row[fidx]) != row[fidx]:
                    outVal += str(row[fidx])
                else:
                    outVal += '"' + str(row[fidx]) + '"'
                    
            except:
                outVal += strVal
        else:
            outVal += strVal
    if len(calcList) == 1 and outVal == '':
        outVal = calcList[0]
    try:
        if(outVal != "" and outVal != None):
            outVal = eval(outVal)
    except:
        dla.addMessage("Error evaluating:" + outVal)
        dla.showTraceback()
        dla.addError("Error calculating field values:" + outVal)
        outVal = None
    return outVal
Example #3
def hasCapabilities(url, token, checkList):
    hasit = False
    if token != None and isFeatureLayerUrl(url):
        params = {'f': 'pjson', 'token': token}
        response = sendRequest(url, params)
        if response != None:
            try:
                error = json.dumps(response['error'])
                dla.addError('Unable to access service properties ' + error)
                return False
            except:
                hasit = True

            try:
                capabilities = json.dumps(response['capabilities'])
                dla.addMessage('Service REST capabilities: ' + capabilities)
                for item in checkList:
                    if capabilities.find(item) == -1:
                        #dla.addMessage('Service does not support: ' + item)
                        hasit = False
                    else:
                        dla.addMessage('Service supports: ' + item)
            except:
                dla.addError('Unable to access service capabilities')
                hasit = False
        else:
            dla.addError('Unable to access service')
            hasit = False

    return hasit
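A hedged usage sketch (the service URL is a placeholder; getSigninToken, addMessage and addError are the same dla helpers used above):

token = dla.getSigninToken()
url = "https://services.arcgis.com/<org>/arcgis/rest/services/Parcels/FeatureServer/0"  # placeholder
if hasCapabilities(url, token, ["Create", "Delete"]):
    dla.addMessage("Layer supports Create and Delete")
else:
    dla.addError("Layer is missing a required capability")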
Example #4
def getServiceName(url):
    parts = url.split('/')
    lngth = len(parts)
    if lngth > 8:
        dla.addMessage("Service Name: " + parts[lngth - 3])
        return parts[lngth - 3]
    else:
        return None
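For a feature service layer URL the service name is the third path segment from the end; reusing the TaxDistribution URL that appears later in this file:

url = "http://services.arcgis.com/b6gLrKHqgkQb393u/arcgis/rest/services/TaxDistribution/FeatureServer/0"
print(getServiceName(url))  # TaxDistribution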
def writeDocument(sourceDataset,targetDataset,xmlFileName):

    desc = arcpy.Describe(sourceDataset)
    descT = arcpy.Describe(targetDataset)
    sourcePath = getLayerPath(desc)
    targetPath = getLayerPath(descT)

    ## Added May2016. warn user if capabilities are not correct, exit if not a valid layer
    if not dla.checkServiceCapabilities(sourcePath,False):
        dla.addMessage(sourceDataset + ' does not appear to be a feature service layer, exiting. Check that you selected a layer, not a service')
        return False
    if not dla.checkServiceCapabilities(targetPath,False):
        dla.addMessage(targetDataset + ' does not appear to be a feature service layer, exiting. Check that you selected a layer, not a service')
        return False
    
    xmlDoc = Document()
    root = xmlDoc.createElement('SourceTargetMatrix')
    xmlDoc.appendChild(root)
    root.setAttribute("version",'1.1')
    root.setAttribute("xmlns:esri",'http://www.esri.com')

    dataset = xmlDoc.createElement("Datasets")
    root.appendChild(dataset)
    setSourceTarget(dataset,xmlDoc,"Source",sourcePath)
    setSourceTarget(dataset,xmlDoc,"Target",targetPath)
    
    setSpatialReference(dataset,xmlDoc,desc,"Source")
    setSpatialReference(dataset,xmlDoc,descT,"Target")    

    setSourceTarget(dataset,xmlDoc,"ReplaceBy","")
    
    fieldroot = xmlDoc.createElement("Fields")
    root.appendChild(fieldroot)

    fields = getFields(descT,targetDataset)
    sourceFields = getFields(desc,sourceDataset)
    sourceNames = [field.name[field.name.rfind(".")+1:] for field in sourceFields]
    upperNames = [nm.upper() for nm in sourceNames]

    #try:
    for field in fields:
        
        fNode = xmlDoc.createElement("Field")
        fieldroot.appendChild(fNode)
        fieldName = field.name[field.name.rfind(".")+1:]
        matchSourceFields(xmlDoc,fNode,field,fieldName,sourceNames,upperNames)       

    # write the source field values
    setSourceFields(root,xmlDoc,sourceFields)
    setTargetFields(root,xmlDoc,fields)
    # Should add a template section for value maps, maybe write domains...
    # could try to preset field mapping and domain mapping...

    # add some data to the document
    writeDataSample(xmlDoc,root,sourceNames,sourceDataset,10)
    # write it out
    xmlDoc.writexml( open(xmlFileName, 'w'),indent="  ",addindent="  ",newl='\n')
    xmlDoc.unlink()   
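A hedged call sketch (layer names and the output path are placeholders; both inputs must pass the checkServiceCapabilities test above or the function returns False):

sourceLayer = "ParcelsSource"   # placeholder feature service layer in the map
targetLayer = "ParcelsTarget"   # placeholder feature service layer in the map
if writeDocument(sourceLayer, targetLayer, r"C:\temp\Parcels_SourceTarget.xml") == False:
    print("Source or target did not pass the feature service layer check")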
Example #6
def simplifyPolygons(sourceDataset):
    # simplify polygons using approach developed by Chris Bus.
    dla.addMessage("Simplifying (densifying) Parcel Geometry")
    arcpy.Densify_edit(sourceDataset)
    simplify = sourceDataset + '_simplified'
    if arcpy.Exists(simplify):
        arcpy.Delete_management(simplify)
    if arcpy.Exists(simplify + '_Pnt'):
        arcpy.Delete_management(simplify + '_Pnt')
        
    arcpy.SimplifyPolygon_cartography(sourceDataset, simplify, "POINT_REMOVE", "1 Meters")
    return simplify
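Usage is a single call; the input is densified in place and a new <name>_simplified feature class is written next to it (the path below is a placeholder):

src = r"C:\data\parcels.gdb\TaxParcels"  # placeholder feature class
simplified = simplifyPolygons(src)
dla.addMessage("Simplified output: " + simplified)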
Example #7
def simplifyPolygons(sourceDataset):
    # simplify polygons using approach developed by Chris Bus.
    dla.addMessage("Simplifying (densifying) Geometry")
    arcpy.Densify_edit(sourceDataset)
    simplify = sourceDataset + '_simplified'
    if arcpy.Exists(simplify):
        arcpy.Delete_management(simplify)
    if arcpy.Exists(simplify + '_Pnt'):
        arcpy.Delete_management(simplify + '_Pnt')

    arcpy.SimplifyPolygon_cartography(sourceDataset, simplify, "POINT_REMOVE", "1 Meters")
    return simplify
def getLayerPath(desc): # requires arcpy.Describe object
    # altered May31 2016 to handle no .path for layer...
    pth = None
    try:
        pth = desc.catalogPath
    except:
        try:
            pth = desc.path
        except:
            dla.addError('Unable to obtain a source path for this layer. Please select a feature layer and re-run this tool')
    if pth != None:
        dla.addMessage(pth)
    return pth
def extract(xmlFileName, rowLimit, workspace, sourceLayer, targetFC):

    xmlDoc = dla.getXmlDoc(xmlFileName)
    if workspace == "" or workspace == "#" or workspace == None:
        dla.workspace = arcpy.env.scratchGDB
    else:
        dla.workspace = workspace
    fields = dla.getFields(xmlFileName)
    success = True
    name = ''
    try:
        if not arcpy.Exists(dla.workspace):
            dla.addMessage(dla.workspace +
                           " does not exist, attempting to create")
            dla.createGeodatabase()
        if len(fields) > 0:
            arcpy.SetProgressor("step", "Importing Layer...", 0, 1, 1)

            if sourceLayer == '' or sourceLayer == '#':
                source = dla.getNodeValue(xmlDoc, "Datasets/Source")
            else:
                source = sourceLayer
            if targetFC == '' or targetFC == '#':
                targetName = dla.getTargetName(xmlDoc)
            else:
                targetName = targetFC[targetFC.rfind(os.sep) + 1:]

            sourceName = dla.getSourceName(xmlDoc)
            arcpy.SetProgressorLabel("Loading " + sourceName + " to " +
                                     targetName + "...")
            #if not arcpy.Exists(sourceLayer):
            #    dla.addError("Layer " + sourceLayer + " does not exist, exiting")
            #    return

            retVal = exportDataset(xmlDoc, sourceLayer, dla.workspace,
                                   targetName, rowLimit)
            if retVal == False:
                success = False

        arcpy.SetProgressorPosition()
    except:
        dla.addError("A Fatal Error occurred")
        dla.showTraceback()
        success = False
    finally:
        arcpy.ResetProgressor()
        #arcpy.RefreshCatalog(dla.workspace)
        arcpy.ClearWorkspaceCache_management(dla.workspace)

    return success
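A hedged call sketch for this extract variant (the config path is a placeholder; "#" for workspace/source/target lets the XML document supply those values, and rowLimit=None exports all rows):

xmlFile = r"C:\configs\Parcels.xml"  # placeholder Data Assistant config file
ok = extract(xmlFile, None, "#", "#", "#")
print("Extract succeeded" if ok else "Extract failed, see messages")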
def getLayerPath(desc):  # requires arcpy.Describe object
    # altered May31 2016 to handle no .path for layer...
    pth = None
    try:
        pth = desc.catalogPath
    except:
        try:
            pth = desc.path
        except:
            dla.addError(
                'Unable to obtain a source path for this layer. Please select a feature layer and re-run this tool'
            )
    if pth != None:
        dla.addMessage(pth)
    return pth
def extract(xmlFileName,rowLimit,workspace,sourceLayer,targetFC):          

    xmlDoc = dla.getXmlDoc(xmlFileName)
    if workspace == "" or workspace == "#" or workspace == None:  
        dla.workspace = arcpy.env.scratchGDB
    else:
        dla.workspace = workspace
    fields = dla.getFields(xmlFileName)
    success = True
    name = ''
    try:
        if not arcpy.Exists(dla.workspace):
            dla.addMessage(dla.workspace + " does not exist, attempting to create")
            dla.createGeodatabase()
        if len(fields) > 0:
            arcpy.SetProgressor("step", "Importing Layer...",0,1,1)

            if sourceLayer == '' or sourceLayer == '#':                
                source = dla.getNodeValue(xmlDoc,"Datasets/Source")
            else:
                source = sourceLayer
            if targetFC == '' or targetFC == '#':
                targetName = dla.getTargetName(xmlDoc)
            else:
                targetName = targetFC[targetFC.rfind(os.sep)+1:]

            sourceName = dla.getSourceName(xmlDoc)
            arcpy.SetProgressorLabel("Loading " + sourceName + " to " + targetName +"...")
            #if not arcpy.Exists(sourceLayer):
            #    dla.addError("Layer " + sourceLayer + " does not exist, exiting")
            #    return
            
            retVal = exportDataset(xmlDoc,sourceLayer,dla.workspace,targetName,rowLimit)
            if retVal == False:
                success = False

        arcpy.SetProgressorPosition()
    except:
        dla.addError("A Fatal Error occurred")
        dla.showTraceback()
        success = False
    finally:
        arcpy.ResetProgressor()
        #arcpy.RefreshCatalog(dla.workspace)
        arcpy.ClearWorkspaceCache_management(dla.workspace)

    return success
def theProjectWay():
    """
    This function is currently not used. It is an alternative to the create feature class/append approach
    currently being used. It is slower because the entire dataset is projected first, and it is less
    straightforward because it adds the transform method that append seems to know how to handle already.
    It is better though because it will actually raise trappable errors while Append fails silently...
    The solution in the other function is to count the resulting records and report issues.
    """
    if targetRef != '':
        if arcpy.Exists(targetName):
            arcpy.Delete_management(targetName)
        inttable = workspace + os.sep + targetName + "_prj"
        arcpy.env.workspace = workspace
        xform = None
        desc = arcpy.Describe(sourceLayer)
        xforms = arcpy.ListTransformations(desc.spatialReference, targetRef,
                                           desc.extent)
        #if sourceRef.exportToString().find("NAD_1983") > -1 and targetRef.exportToString().find("WGS_1984") > -1:
        xform = xforms[0]
        #for xform in xforms:
        dla.addMessage("Transform: " + xform)
        try:
            res = arcpy.Project_management(sourceLayer,
                                           inttable,
                                           out_coor_system=targetRef,
                                           transform_method=xform)
        except:
            dla.showTraceback()
            err = "Unable to project the data to the target spatial reference, please check settings and try projecting manually in ArcGIS"
            dla.addError(err)
            return False
        dla.addMessage("Features projected")
        view = dla.makeFeatureViewForLayer(dla.workspace, inttable, viewName,
                                           whereClause, xmlFields)
        dla.addMessage("View Created")
        #except:
        #    arcpy.AddError("Unabled to create feature View " + viewName)
        count = arcpy.GetCount_management(view).getOutput(0)
        dla.addMessage(str(count) + " source rows")
        #sourceRef = getSpatialReference(xmlDoc,"Source")
        #res = arcpy.CreateFeatureclass_management(workspace,targetName,template=sourceLayer,spatial_reference=targetRef)
        res = arcpy.CopyFeatures_management(view, targetName)
        dla.addMessage("Features copied")
def extract(xmlFileName, rowLimit, workspace, source, target, datasetType):

    xmlDoc = dla.getXmlDoc(xmlFileName)
    if workspace == "" or workspace == "#" or workspace == None:
        dla.workspace = dla.setWorkspace()
    else:
        dla.workspace = workspace
    fields = dla.getFields(xmlFileName)
    success = True
    name = ''
    try:
        if not arcpy.Exists(dla.workspace):
            dla.addMessage(dla.workspace +
                           " does not exist, attempting to create")
            dla.createGeodatabase()
        if len(fields) > 0:
            arcpy.SetProgressor("step", "Importing Layer...", 0, 1, 1)

            targetName = dla.getDatasetName(target)
            sourceName = dla.getDatasetName(source)
            arcpy.SetProgressorLabel("Loading " + sourceName + " to " +
                                     targetName + "...")

            if not arcpy.Exists(source):
                dla.addError("Layer " + source + " does not exist, exiting")
                return

            retVal = exportDataset(xmlDoc, source, dla.workspace, targetName,
                                   rowLimit, datasetType)
            if retVal == False:
                success = False

        arcpy.SetProgressorPosition()
    except:
        dla.addError("A Fatal Error occurred")
        dla.showTraceback()
        success = False
    finally:
        arcpy.ResetProgressor()
        #arcpy.RefreshCatalog(dla.workspace)
        arcpy.ClearWorkspaceCache_management(dla.workspace)

    return success
Example #14
def test2():

    dla._project = arcpy.mp.ArcGISProject(
        r"C:\Users\Steve\Documents\ArcGIS\Projects\pbmpolygons\pbmpolygons.aprx"
    )
    ws = r"C:\Users\Steve\Documents\ArcGIS\Projects\MyProject11\shp\Data Assistant 10.4 Testing\pbmnorepair.gdb"
    base = "pbmpoly"
    res = arcpy.GetCount_management(os.path.join(ws, base))
    cnt = int(res.getOutput(0))
    chunk = 100000
    lngth = int(cnt / chunk)

    for group in range(0, lngth):

        minoid = group * chunk
        where = 'OBJECTID > ' + str(minoid) + ' AND OBJECTID <= ' + str(
            minoid + chunk)
        dla.addMessage(where)

        layername = "pbmpolys"
        if arcpy.Exists(layername):
            arcpy.Delete_management(layername)
        result = arcpy.MakeFeatureLayer_management(in_features=os.path.join(
            ws, base),
                                                   where_clause=where,
                                                   workspace=ws,
                                                   out_layer=layername)

        cnt = result.getOutput(0)
        outpath = r"C:\Users\Steve\Documents\ArcGIS\Projects\MyProject11\shp\Data Assistant 10.4 Testing\pbm.gdb"
        outname = "pbmpoly" + str(group)
        ds = os.path.join(outpath, outname)
        if arcpy.Exists(ds):
            arcpy.Delete_management(ds)
        arcpy.FeatureClassToFeatureClass_conversion(in_features=layername,
                                                    out_path=outpath,
                                                    out_name=outname)

        outdoc = r"C:\Users\Steve\Documents\ArcGIS\Projects\pbmpolygons\pbm" + str(
            group) + ".xml"
        svce = r"http://services.arcgis.com/b6gLrKHqgkQb393u/arcgis/rest/services/TaxDistribution/FeatureServer/0"

        dlaCreateSourceTarget.createDlaFile(ds, svce, outdoc)
def getLayerPath(pth): # requires string for layer argument
    # altered May31 2016 to handle no .path for layer...
    # altered August 2016 - Pro 1.3.1 changes in urls for feature service layers
    if pth != None:
        pthurl = dla.getLayerSourceUrl(pth)
        desc = arcpy.Describe(pthurl)
        try:
            pth = desc.catalogPath
            #dla.addMessage("catalogPath:" + pth)
        except:
            try:
                pth = desc.path
                #dla.addMessage("path:" + pth)
            except:
                dla.addError('Unable to obtain a source path for this layer. Please select a feature layer and re-run this tool')
                pth = None
        if pth != None:
            pth = dla.getLayerServiceUrl(pth)
            dla.addMessage("Output path:" + pth)
    return pth
def removeDefaultValues(dataset):
    # exported source fields may contain DefaultValues, which can replace None/null values in field calculations
    sourceFields = arcpy.ListFields(
        dataset)  # xmlDoc.getElementsByTagName("SourceField")
    #stypes = arcpy.da.ListSubtypes(dataset) # my current understanding is that the intermediate/exported dataset will not have subtypes, just default/0 subtype if present in source dataset.

    dla.addMessage(
        "Removing Default Value property from intermediate database fields")
    for sfield in sourceFields:
        fname = sfield.name
        if sfield.defaultValue != None:
            try:
                arcpy.AssignDefaultToField_management(
                    in_table=dataset,
                    field_name=fname,
                    default_value=None,
                    clear_value=True)  # clear the Defaults
            except:
                dla.addMessage(
                    "Unable to set DefaultValue for " + fname
                )  # skip GlobalIDs/other fields that cannot be updated. Should not have a default set in these cases
Example #17
def getOIDs(targelUrl,expr):
    # get the list of oids.
    ids = []
    arcpy.SetProgressor("default","Querying Existing Features")
    arcpy.SetProgressorLabel("Querying Existing Features")
    url = targelUrl + '/query'
    #dla.addMessage("Url:"+url)
    token = dla.getSigninToken()
    if expr != '':
        params = {'f': 'pjson', 'where': expr,'token':token,'returnIdsOnly':'true'}
    else:
        params = {'f': 'pjson', 'where': '1=1','token':token,'returnIdsOnly':'true'}
        
    #dla.addMessage("Params:"+json.dumps(params))
    result = dla.sendRequest(url,params)            
    try:
        if result['error'] != None:
            retval = False
            dla.addMessage("Query features from Feature Service failed")
            dla.addMessage(json.dumps(result))
            error = True
    except:
        ids = result['objectIds']
        lenFound = len(ids)
        msg = str(lenFound) + " features found in existing Service"
        print(msg)
        dla.addMessage(msg)
        retval = True

    return ids    
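A hedged usage sketch (the URL and where clause are placeholders; the caller must be signed in because getOIDs requests a token via dla.getSigninToken):

targetUrl = "https://services.arcgis.com/<org>/arcgis/rest/services/Parcels/FeatureServer/0"  # placeholder
oids = getOIDs(targetUrl, "STATUS = 'RETIRED'")  # pass '' to return every object id
dla.addMessage(str(len(oids)) + " object ids returned")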
def getFieldMap(view, ds):

    fieldMaps = arcpy.FieldMappings()
    fieldMaps.addTable(ds)
    inFields = [
        field.name for field in arcpy.ListFields(view)
        if field.name.upper() not in dla._ignoreFields
    ]  # not field.required removed after .Enabled issue
    removenames = []
    for i in range(fieldMaps.fieldCount):
        field = fieldMaps.fields[i]
        fmap = fieldMaps.getFieldMap(i)
        fName = field.name
        for s in range(0, fmap.inputFieldCount):
            try:
                fmap.removeInputField(0)
            except:
                pass
        try:
            f = -1
            try:
                f = inFields.index(fName)  # simple case where names are equal
            except:
                f = inFields.index(
                    fName.replace('_', '.', 1)
                )  # just replace the first char - more complex case like xfmr.phase_designation
            if f > -1:
                inField = inFields[f]
                fmap.addInputField(view, inField)
                fieldMaps.replaceFieldMap(i, fmap)
        except:
            removenames.append(fName)

    for name in removenames:
        i = fieldMaps.findFieldMapIndex(name)
        fieldMaps.removeFieldMap(i)
        dla.addMessage(name + ' removed from fieldMappings')

    return fieldMaps
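The mappings built by getFieldMap are meant to feed a NO_TEST Append, matching the commented-out pattern in exportDataset later in this file; a sketch with placeholder datasets:

view = "Parcels_View"                         # placeholder feature layer or table view
ds = r"C:\scratch\scratch.gdb\Parcels_stage"  # placeholder intermediate feature class
fieldMaps = getFieldMap(view, ds)
arcpy.Append_management(view, ds, schema_type="NO_TEST", field_mapping=fieldMaps)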
def theProjectWay():
    """
    This function is currently not used. It is an alternative to the create feature class/append approach
    currently being used. It is slower because the entire dataset is projected first, and it is less
    straightforward because it adds the transform method that append seems to know how to handle already.
    It is better though because it will actually raise trappable errors while Append fails silently...
    The solution in the other function is to count the resulting records and report issues.
    """
    if targetRef != '':
        if arcpy.Exists(targetName):
            arcpy.Delete_management(targetName)
        inttable = workspace+os.sep+targetName+"_prj"
        arcpy.env.workspace = workspace
        xform = None
        desc = arcpy.Describe(sourceLayer)
        xforms = arcpy.ListTransformations(desc.spatialReference, targetRef, desc.extent)            
        #if sourceRef.exportToString().find("NAD_1983") > -1 and targetRef.exportToString().find("WGS_1984") > -1:
        xform = xforms[0]
        #for xform in xforms:
        dla.addMessage("Transform: " + xform)
        try:
            res = arcpy.Project_management(sourceLayer,inttable,out_coor_system=targetRef,transform_method=xform)
        except:
            dla.showTraceback()
            err = "Unable to project the data to the target spatial reference, please check settings and try projecting manually in ArcGIS"
            dla.addError(err)
            return False
        dla.addMessage("Features projected")            
        view = dla.makeFeatureViewForLayer(dla.workspace,inttable,viewName,whereClause,xmlFields)
        dla.addMessage("View Created")            
        #except:
        #    arcpy.AddError("Unabled to create feature View " + viewName)
        count = arcpy.GetCount_management(view).getOutput(0)
        dla.addMessage(str(count) + " source rows")
        #sourceRef = getSpatialReference(xmlDoc,"Source")
        #res = arcpy.CreateFeatureclass_management(workspace,targetName,template=sourceLayer,spatial_reference=targetRef)
        res = arcpy.CopyFeatures_management(view,targetName)
        dla.addMessage("Features copied")     
def main(argv=None):
    global sourceDataset, targetDataset, xmlFileName
    dla.addMessage(sourceDataset)
    dla.addMessage(targetDataset)
    dla.addMessage(xmlFileName)
    if not os.path.exists(matchxslt):
        msg = matchxslt + " does not exist, exiting"
        arcpy.AddError(msg)
        print(msg)
        return
    if not os.path.exists(matchfile):
        msg = matchfile + " does not exist, exiting"
        arcpy.AddError(msg)
        print(msg)
        return
    createDlaFile(sourceDataset, targetDataset, xmlFileName)
def main(argv = None):
    global sourceDataset,targetDataset,xmlFileName   
    dla.addMessage(sourceDataset)
    dla.addMessage(targetDataset)
    dla.addMessage(xmlFileName)
    if not os.path.exists(matchxslt):
        msg = matchxslt + " does not exist, exiting"
        arcpy.AddError(msg)
        print(msg)
        return
    if not os.path.exists(matchfile):
        msg = matchfile + " does not exist, exiting"
        arcpy.AddError(msg)
        print(msg)
        return
    createDlaFile(sourceDataset,targetDataset,xmlFileName)
def main(argv = None):
    global source,target,xmlFileName   

    source = dla.checkIsLayerFile(source,sourceStr)
    target = dla.checkIsLayerFile(target,targetStr)

    dla.addMessage("Source: " + str(source))
    dla.addMessage("Target: " + str(target))

    dla.addMessage("File: " + xmlFileName)
    if not os.path.exists(matchxslt):
        msg = matchxslt + " does not exist, exiting"
        arcpy.AddError(msg)
        print(msg)
        return
    if not os.path.exists(matchfile):
        msg = matchfile + " does not exist, exiting"
        arcpy.AddError(msg)
        print(msg)
        return
    createDlaFile(source,target,xmlFileName)
Example #23
def getOIDs(targelUrl, expr):
    # get the list of oids.
    ids = []
    arcpy.SetProgressor("default", "Querying Existing Features")
    arcpy.SetProgressorLabel("Querying Existing Features")
    url = targelUrl + '/query'
    #dla.addMessage("Url:"+url)
    token = dla.getSigninToken()
    if expr != '':
        params = {
            'f': 'pjson',
            'where': expr,
            'token': token,
            'returnIdsOnly': 'true'
        }
    else:
        params = {
            'f': 'pjson',
            'where': '1=1',
            'token': token,
            'returnIdsOnly': 'true'
        }

    #dla.addMessage("Params:"+json.dumps(params))
    result = dla.sendRequest(url, params)
    try:
        if result['error'] != None:
            retval = False
            dla.addMessage("Query features from Feature Service failed")
            dla.addMessage(json.dumps(result))
            error = True
    except:
        ids = result['objectIds']
        lenFound = len(ids)
        msg = str(lenFound) + " features found in existing Service"
        print(msg)
        dla.addMessage(msg)
        retval = True

    return ids
Example #24
def setFieldValues(table, fields, names, ftypes, lengths):
    # use the source xml file to match old values to new values, preparing rows for append to the target geodatabase
    success = False
    row = None
    try:
        updateCursor = arcpy.da.UpdateCursor(table, names)

        result = arcpy.GetCount_management(table)
        numFeat = int(result.getOutput(0))
        dla.addMessage(table + ", " + str(numFeat) + " features")
        i = 0
        arcpy.SetProgressor("Step", "Calculating " + table + "...", 0, numFeat,
                            getProgressUpdate(numFeat))

        for row in updateCursor:
            success = True
            if dla._errCount > dla.maxErrorCount:
                dla.addError(
                    "Exceeded max number of errors in dla.maxErrorCount: " +
                    str(dla.maxErrorCount))
                return False
            if i > dla.maxrows:
                dla.addError(
                    "Exceeded max number of rows supported in dla.maxrows: " +
                    str(dla.maxrows))
                return True
            i = i + 1
            setProgressor(i, numFeat)

            for field in fields:
                method = "None"
                sourceName = dla.getNodeValue(field,
                                              "SourceName").replace('.', '_')
                targetName = dla.getNodeValue(field,
                                              "TargetName").replace('.', '_')

                targetValue = getTargetValue(row, field, names, sourceName,
                                             targetName)
                sourceValue = getSourceValue(row, names, sourceName,
                                             targetName)
                method = dla.getNodeValue(field, "Method").replace(" ", "")
                try:
                    fnum = dla.getFieldIndexList(names, targetName)
                except:
                    fnum = None  # defensive check to skip fields that do not exist even though they are listed in Xml

                if fnum != None:
                    if method == "None" or (method == "Copy"
                                            and sourceName == '(None)'):
                        method = "None"
                        val = None
                    elif method == "Copy":
                        val = sourceValue
                    elif method == "DefaultValue":
                        val = dla.getNodeValue(field, "DefaultValue")
                    elif method == "SetValue":
                        val = dla.getNodeValue(field, "SetValue")
                    elif method == "ValueMap":
                        val = getValueMap(targetName, sourceValue, field)
                    elif method == "DomainMap":
                        val = getDomainMap(row, sourceValue, field)
                    elif method == "ChangeCase":
                        case = dla.getNodeValue(field, method)
                        expression = getChangeCase(sourceValue, case)
                        val = getExpression(row, names, expression)
                    elif method == "Concatenate":
                        val = getConcatenate(row, names, field)
                    elif method == "Left":
                        chars = dla.getNodeValue(field, "Left")
                        val = getSubstring(sourceValue, "0", chars)
                    elif method == "Right":
                        chars = dla.getNodeValue(field, "Right")
                        val = getSubstring(sourceValue,
                                           len(str(sourceValue)) - int(chars),
                                           len(str(sourceValue)))
                    elif method == "Substring":
                        start = dla.getNodeValue(field, "Start")
                        length = dla.getNodeValue(field, "Length")
                        val = getSubstring(sourceValue, start, length)
                    elif method == "Split":
                        splitter = dla.getNodeValue(field, "SplitAt")
                        splitter = splitter.replace("(space)", " ")
                        part = dla.getNodeValue(field, "Part")
                        val = getSplit(sourceValue, splitter, part)
                    elif method == "ConditionalValue":
                        sname = dla.getNodeValue(field, "SourceName")
                        oper = dla.getNodeValue(field, "Oper")
                        iif = dla.getNodeValue(field, "If")
                        if iif != " " and type(iif) == 'str':
                            for name in names:
                                if name in iif:
                                    iif = iif.replace(name, "|" + name + "|")
                        tthen = dla.getNodeValue(field, "Then")
                        eelse = dla.getNodeValue(field, "Else")
                        for name in names:
                            if name in eelse:
                                eelse = eelse.replace(name, "|" + name + "|")
                        expression = "|" + tthen + "| " + " if |" + sname + "| " + oper + " |" + iif + "| else " + eelse
                        val = getExpression(row, names, expression)
                    elif method == "Expression":
                        expression = dla.getNodeValue(field, method)
                        for name in names:
                            expression = expression.replace(
                                name, "|" + name + "|")
                        val = getExpression(row, names, expression)
                    # set field value
                    newVal = getValue(ftypes[fnum], lengths[fnum], targetName,
                                      targetValue, val)
                    row[fnum] = newVal
                    if dla.debug == True:
                        dla.addMessage(targetName + ':' + str(newVal) + ':' +
                                       str(targetValue))
            try:
                updateCursor.updateRow(row)
                #printRow(row,names)
            except:
                dla._errCount += 1
                success = False
                err = "Exception caught: unable to update row"
                if dla._errCount < 200:
                    printRow(row, names)
                    dla.showTraceback()
                else:
                    if dla._errCount < 2000:
                        dla.addMessage(
                            'More than 200 errors encountered... debug output suppressed'
                        )
                dla.addError(err)
    except:
        dla._errCount += 1
        success = False
        err = "Exception caught: unable to update dataset"
        if row != None:
            printRow(row, names)
        dla.showTraceback()
        dla.addError(err)

    finally:
        del updateCursor
        dla.cleanupGarbage()
        arcpy.ResetProgressor()

    return success
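Each field node handed to setFieldValues is expected to carry SourceName, TargetName and Method children, plus method-specific nodes such as DefaultValue, SetValue or SplitAt; a hypothetical element for the Copy branch, inferred from the node names read above rather than taken from a real config:

sampleField = """
<Field>
  <SourceName>PARCEL_ID</SourceName>
  <TargetName>ParcelNumber</TargetName>
  <Method>Copy</Method>
</Field>
"""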
Example #25
def publish(xmlFileNames):
    # function called from main or from another script, performs the data update processing
    global sourceLayer,targetLayer,_success
    dla._errorCount = 0

    arcpy.SetProgressor("default","Data Assistant")
    arcpy.SetProgressorLabel("Data Assistant")
    xmlFiles = xmlFileNames.split(";")
    for xmlFile in xmlFiles: # multi value parameter, loop for each file
        dla.addMessage("Configuration file: " + xmlFile)
        xmlDoc = dla.getXmlDoc(xmlFile) # parse the xml document
        if xmlDoc == None:
            return
        svceS = False
        svceT = False
        if sourceLayer == "" or sourceLayer == None:
            sourceLayer = dla.getNodeValue(xmlDoc,"Source")
            svceS = dla.checkLayerIsService(sourceLayer)
        if targetLayer == "" or targetLayer == None:
            targetLayer = dla.getNodeValue(xmlDoc,"Target")
            svceT = dla.checkLayerIsService(targetLayer)

        dla.addMessage(targetLayer)
        ## Added May2016. warn user if capabilities are not correct, exit if not a valid layer
        if not dla.checkServiceCapabilities(sourceLayer,True):
            return False
        if not dla.checkServiceCapabilities(targetLayer,True):
            return False

        if svceS == True or svceT == True:
            token = dla.getSigninToken() # when signed in get the token and use this. Will be requested many times during the publish
            if token == None:
                dla.addError("User must be signed in for this tool to work with services")
                return

        expr = getWhereClause(xmlDoc)
        if useReplaceSettings == True and (expr == '' or expr == None):
            dla.addError("There must be an expression for replacing by field value, current value = " + str(expr))
            return False

        dla.setWorkspace()
        targetName = dla.getTargetName(xmlDoc)
        res = dlaExtractLayerToGDB.extract(xmlFile,None,dla.workspace,sourceLayer,targetName)
        if res != True:
            table = dla.getTempTable(targetName)
            msg = "Unable to export data, there is a lock on existing datasets or another unknown error"
            if arcpy.TestSchemaLock(table) != True:
                msg = "Unable to export data, there is a lock on the intermediate feature class: " + table
            dla.addError(msg)
            print(msg)
            return
        else:
            res = dlaFieldCalculator.calculate(xmlFile,dla.workspace,targetName,False)
            if res == True:
                dlaTable = dla.getTempTable(targetName)
                res = doPublish(xmlDoc,dlaTable,targetLayer)

        arcpy.ResetProgressor()
        sourceLayer = None # set source and target back to None for multiple file processing
        targetLayer = None
        if res == False:
            err = "Data Assistant Update Failed, see messages for details"
            dla.addError(err)
            print(err)
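A hedged call sketch (config paths are placeholders; the module-level sourceLayer and targetLayer globals may be left empty so they are read from each config, and a portal sign-in is required when either dataset is a service):

# multiple config files can be passed as one ';'-separated string
publish(r"C:\configs\Parcels.xml;C:\configs\Hydrants.xml")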
Example #26
def publish(xmlFileNames):
    # function called from main or from another script, performs the data update processing
    global _useReplaceSettings
    dla._errCount = 0

    arcpy.SetProgressor("default","Data Assistant")
    arcpy.SetProgressorLabel("Data Assistant")
    xmlFiles = xmlFileNames.split(";")
    layers = []

    for xmlFile in xmlFiles: # multi value parameter, loop for each file
        xmlFile = dla.getXmlDocName(xmlFile)
        dla.addMessage("Configuration file: " + xmlFile)
        xmlDoc = dla.getXmlDoc(xmlFile) # parse the xml document
        if xmlDoc == None:
            return
        prj = dla.setProject(xmlFile,dla.getNodeValue(xmlDoc,"Project"))
        if prj == None:
            dla.addError("Unable to open your project, please ensure it is in the same folder as your current project or your Config file")
            return False

        source = dla.getDatasetPath(xmlDoc,"Source")
        target = dla.getDatasetPath(xmlDoc,"Target")
        targetName = dla.getDatasetName(target)
        dla.addMessage(source)
        dla.addMessage(target)

        if dlaService.checkLayerIsService(source) or dlaService.checkLayerIsService(target):
            token = dlaService.getSigninToken() # when signed in get the token and use this. Will be requested many times during the publish
            # exit here before doing other things if not signed in
            if token == None:
                dla.addError("User must be signed in for this tool to work with services")
                return False

        expr = getWhereClause(xmlDoc)
        if _useReplaceSettings == True and (expr == '' or expr == None):
            dla.addError("There must be an expression for replacing by field value, current value = " + str(expr))
            return False

        errs = False
        if dlaService.validateSourceUrl(source) == False:
            dla.addError("Source path does not appear to be a valid feature layer")
            errs = True

        if _useReplaceSettings == True:
            if dlaService.validateTargetReplace(target) == False:
                dla.addError("Target path does not have correct privileges")
                errs = True
        elif _useReplaceSettings == False:
            if dlaService.validateTargetAppend(target) == False:
                dla.addError("Target path does not have correct privileges")
                errs = True

        if errs:
            return False


        dla.setWorkspace()

        if dla.isTable(source) or dla.isTable(target):
            datasetType = 'Table'
        else:
            datasetType = 'FeatureClass'

        if not dla.isStaged(xmlDoc):
            res = dlaExtractLayerToGDB.extract(xmlFile,None,dla.workspace,source,target,datasetType)
            if res != True:
                table = dla.getTempTable(targetName)
                msg = "Unable to export data, there is a lock on existing datasets or another unknown error"
                if arcpy.TestSchemaLock(table) != True and arcpy.Exists(table) == True:
                    msg = "Unable to export data, there is a lock on the intermediate feature class: " + table
                dla.addError(msg)
                print(msg)
                return
            else:
                res = dlaFieldCalculator.calculate(xmlFile,dla.workspace,targetName,False)
                if res == True:
                    dlaTable = dla.getTempTable(targetName)
                    res = doPublish(xmlDoc,dlaTable,target,_useReplaceSettings)
        else:
            dla.addMessage('Data previously staged, will proceed using intermediate dataset')
            dlaTable = dla.workspace + os.sep + dla.getStagingName(source,target)
            res = doPublish(xmlDoc,dlaTable,target,_useReplaceSettings)
            if res == True:
                dla.removeStagingElement(xmlDoc)
                xmlDoc.writexml(open(xmlFile, 'wt', encoding='utf-8'))
                dla.addMessage('Staging element removed from config file')

        arcpy.ResetProgressor()
        if res == False:
            err = "Data Assistant Update Failed, see messages for details"
            dla.addError(err)
            print(err)
        else:
            layers.append(target)

    arcpy.SetParameter(_outParam,';'.join(layers))
Example #27
def addFeatures(sourceLayer,targelUrl,expr):
    # add features using _chunkSize
    retval = False
    error = False
    # add section
    try:
        arcpy.SetProgressor("default","Adding Features")
        arcpy.SetProgressorLabel("Adding Features")
        featurejs = featureclass_to_json(sourceLayer)
        url = targelUrl + '/addFeatures'  
        numFeat = len(featurejs['features'])
        if numFeat == 0:
            dla.addMessage("0 Features to Add, exiting")            
            return True # nothing to add is OK
        if numFeat > _chunkSize:
            chunk = _chunkSize
        else:
            chunk = numFeat
        featuresProcessed = 0
        while featuresProcessed < numFeat  and error == False:
            next = featuresProcessed + chunk
            features = featurejs['features'][featuresProcessed:next]
            msg = "Adding features " + str(featuresProcessed) + ":" + str(next)
            dla.addMessage(msg)
            arcpy.SetProgressorLabel(msg)
            token = dla.getSigninToken()
            params = {'rollbackonfailure': 'true','f':'json', 'token':token, 'features': json.dumps(features)}
            result = dla.sendRequest(url,params)            
            try:
                if result['error'] != None:
                    retval = False
                    dla.addMessage("Add features to Feature Service failed")
                    dla.addMessage(json.dumps(result))
                    error = True
            except:
                try:
                    lenAdded = len(result['addResults']) 
                    msg = str(lenAdded) + " features added, " + str(featuresProcessed + chunk) + "/" + str(numFeat)
                    print(msg)
                    dla.addMessage(msg)
                    retval = True
                except:
                    retval = False
                    dla.addMessage("Add features to Feature Service failed")
                    dla.showTraceback()
                    dla.addError(json.dumps(result))
                    error = True
            featuresProcessed += chunk
    except:
        retval = False
        dla.addMessage("Add features to Feature Service failed")
        dla.showTraceback()
        error = True
        pass

    return retval
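A hedged usage sketch (paths and the URL are placeholders; the staged feature class must already match the service schema and the user must be signed in so a token is available):

sourceLayer = r"C:\scratch\scratch.gdb\Parcels_stage"  # placeholder staged feature class
targetUrl = "https://services.arcgis.com/<org>/arcgis/rest/services/Parcels/FeatureServer/0"  # placeholder
if addFeatures(sourceLayer, targetUrl, ""):
    dla.addMessage("All feature chunks added")
else:
    dla.addError("Add features failed, see messages")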
Example #28
def deleteFeatures(sourceLayer,targelUrl,expr):
    # delete features using chunks of _chunkSize
    retval = False
    error = False
    # delete section
    ids = getOIDs(targelUrl,expr)
    try:
        lenDeleted = 100
        #Chunk deletes using chunk size at a time
        featuresProcessed = 0
        numFeat = len(ids)
        if numFeat == 0:
            dla.addMessage("0 Features to Delete, exiting")            
            return True # nothing to delete is OK
        if numFeat > _chunkSize:
            chunk = _chunkSize
        else:
            chunk = numFeat
        arcpy.SetProgressor("default","Deleting Features")
        while featuresProcessed < numFeat and error == False:
            #Chunk deletes using chunk size at a time
            next = featuresProcessed + chunk
            msg = "Deleting features " + str(featuresProcessed) + ":" + str(next)
            dla.addMessage(msg)
            arcpy.SetProgressorLabel(msg)
            oids = ",".join(str(e) for e in ids[featuresProcessed:next])
            url = targelUrl + '/deleteFeatures'
            token = dla.getSigninToken()
            params = {'f': 'pjson', 'objectIds': oids,'token':token}
            result = dla.sendRequest(url,params)            
            try:
                if result['error'] != None:
                    retval = False
                    dla.addMessage("Delete features from Feature Service failed")
                    dla.addMessage(json.dumps(result))
                    error = True
            except:
                try:
                    lenDeleted = len(result['deleteResults'])
                    msg = str(lenDeleted) + " features deleted, " + str(featuresProcessed + chunk) + "/" + str(numFeat)
                    print(msg)
                    dla.addMessage(msg)
                    retval = True
                except:
                    retval = False
                    error = True
                    dla.showTraceback()
                    dla.addMessage("Delete features from Feature Service failed")
                    dla.addError(json.dumps(result))
            featuresProcessed += chunk
    except:
        retval = False
        error = True
        dla.showTraceback()
        dla.addMessage("Delete features from Feature Service failed")
        pass

    return retval
def exportDataset(xmlDoc, source, workspace, targetName, rowLimit,
                  datasetType):
    result = True
    xmlFields = xmlDoc.getElementsByTagName("Field")
    dla.addMessage("Exporting Data from " + source)
    whereClause = ""
    if rowLimit != None:
        whereClause = getObjectIdWhereClause(source, rowLimit)

    if whereClause != '' and whereClause != ' ':
        dla.addMessage("Where " + str(whereClause))

    sourceName = dla.getDatasetName(source)
    viewName = sourceName + "_View"
    dla.addMessage(viewName)

    targetRef = getSpatialReference(xmlDoc, "Target")
    sourceRef = getSpatialReference(xmlDoc, "Source")
    if datasetType == 'Table':
        isTable = True
    else:
        isTable = False  # anything that is not a table is handled as a feature class

    arcpy.env.workspace = workspace
    if source.lower().endswith('.lyrx') and not dla.hasJoin(source):
        view = dla.getLayerFromString(source)
    elif isTable:
        view = dla.makeTableView(dla.workspace, source, viewName, whereClause,
                                 xmlFields)
    elif not isTable:
        view = dla.makeFeatureView(dla.workspace, source, viewName,
                                   whereClause, xmlFields)

    dla.addMessage("View Created")
    srcCount = arcpy.GetCount_management(view).getOutput(0)
    dla.addMessage(str(srcCount) + " source rows")
    if str(srcCount) == '0':
        result = False
        dla.addError("Failed to extract " + sourceName + ", Nothing to export")
    else:
        arcpy.env.overwriteOutput = True
        ds = workspace + os.sep + targetName
        currentPreserveGlobalIDs = arcpy.env.preserveGlobalIds
        if dla.processGlobalIds(
                xmlDoc
        ):  # both datasets have globalids in the correct workspace types
            arcpy.env.preserveGlobalIds = True  # try to preserve
            dla.addMessage("Attempting to preserve GlobalIDs")
        else:
            arcpy.env.preserveGlobalIds = False  # don't try to preserve
            dla.addMessage("Unable to preserve GlobalIDs")
        if isTable:
            arcpy.TableToTable_conversion(in_rows=view,
                                          out_path=workspace,
                                          out_name=targetName)
        else:
            spRefMatch = dla.compareSpatialRef(xmlDoc)
            currentRef = arcpy.env.outputCoordinateSystem  # grab currrent env settings
            currentTrans = arcpy.env.geographicTransformations

            if not spRefMatch:
                arcpy.env.outputCoordinateSystem = targetRef
                transformations = arcpy.ListTransformations(
                    sourceRef, targetRef)
                transformations = ";".join(
                    transformations
                )  # concat the values - format change for setting the values.
                arcpy.env.geographicTransformations = transformations

            arcpy.FeatureClassToFeatureClass_conversion(in_features=view,
                                                        out_path=workspace,
                                                        out_name=targetName)

            if not spRefMatch:  # set the spatial reference back
                arcpy.env.outputCoordinateSystem = currentRef
                arcpy.env.geographicTransformations = currentTrans
        arcpy.env.preserveGlobalIds = currentPreserveGlobalIDs

        removeDefaultValues(
            ds
        )  # don't want to turn nulls into defaultValues in the intermediate data

        # not needed if doing the transformations approach above...
        #    if isTable:
        #        if not createDataset('Table',workspace,targetName,None,xmlDoc,source,None):
        #            arcpy.AddError("Unable to create intermediate table, exiting: " + workspace + os.sep + targetName)
        #            return False

        #    elif not isTable:
        #        geomType = arcpy.Describe(source).shapeType
        #        if not createDataset('FeatureClass',workspace,targetName,geomType,xmlDoc,source,targetRef):
        #            arcpy.AddError("Unable to create intermediate feature class, exiting: " + workspace + os.sep + targetName)
        #            return False
        #    fieldMap = getFieldMap(view,ds)
        #    arcpy.Append_management(view,ds,schema_type="NO_TEST",field_mapping=fieldMap)

        dla.addMessage(arcpy.GetMessages(2))  # only serious errors
        count = arcpy.GetCount_management(ds).getOutput(0)
        dla.addMessage(str(count) + " source rows exported to " + targetName)
        if str(count) == '0':
            result = False
            dla.addError(
                "Failed to load to " + targetName +
                ", it is likely that your data falls outside of the target Spatial Reference Extent or there is another basic issue"
            )
            dla.addError(
                "To verify please use the Append and/or Copy Features tool to load some data to an intermediate dataset:"
            )
            dla.addError(ds)
            dla.showTraceback()
    return result
Example #30
def setFieldValues(table,fields,names,ftypes,lengths):
    # use the source xml file to match old values to new values, preparing rows for append to the target geodatabase
    success = False
    row = None
    try:
        updateCursor = arcpy.da.UpdateCursor(table,names)

        result = arcpy.GetCount_management(table)
        numFeat = int(result.getOutput(0))
        dla.addMessage(table + ", " + str(numFeat) + " features")
        i = 0
        arcpy.SetProgressor("Step","Calculating " + table + "...",0,numFeat,getProgressUpdate(numFeat))
        
        for row in updateCursor:
            success = True
            if dla._errCount > dla.maxErrorCount:
                #dla.addMessage("Exceeded max number of errors in dla.maxErrorCount: " + str(dla.maxErrorCount))
                dla.addError("Exceeded max number of errors in dla.maxErrorCount: " + str(dla.maxErrorCount))
                return False
            if i > dla.maxrows:
                #dla.addMessage("Exceeded max number of rows supported in dla.maxrows: " + str(dla.maxrows))
                dla.addError("Exceeded max number of rows supported in dla.maxrows: " + str(dla.maxrows))
                return True
            i = i + 1
            setProgressor(i,numFeat)
            
            for field in fields:
                method = "None"
                sourceName = dla.getNodeValue(field,"SourceName")
                targetName = dla.getNodeValue(field,"TargetName")
                    
                targetValue = getTargetValue(row,field,names,sourceName,targetName)
                sourceValue = getSourceValue(row,names,sourceName,targetName)
                method = dla.getNodeValue(field,"Method").replace(" ","")
                fnum = names.index(targetName)

                if method == "None" or (method == "Copy" and sourceName == dla._noneFieldName):
                    val = None
                    method = "None"
                elif method == "Copy":
                    val = sourceValue
                elif method == "DefaultValue":
                    val = dla.getNodeValue(field,"DefaultValue")
                elif method == "SetValue":
                    val = dla.getNodeValue(field,"SetValue")
                elif method == "ValueMap":
                    val = getValueMap(row,names,sourceValue,field)
                elif method == "ChangeCase":
                    case = dla.getNodeValue(field,method)                    
                    expression = getChangeCase(sourceValue,case)
                    val = getExpression(row,names,expression)
                elif method == "Concatenate":
                    val = getConcatenate(row,names,field)
                elif method == "Left":
                    chars = dla.getNodeValue(field,"Left")
                    val = getSubstring(sourceValue,"0",chars)
                elif method == "Right":
                    chars = dla.getNodeValue(field,"Right")
                    val = getSubstring(sourceValue,len(str(sourceValue))-int(chars),len(str(sourceValue)))
                elif method == "Substring":
                    start = int(dla.getNodeValue(field,"Start"))
                    lngth = int(dla.getNodeValue(field,"Length"))
                    if sourceValue != None:
                        lngth = start + lngth
                    val = getSubstring(sourceValue,start,lngth)
                elif method == "Split":
                    splitter = dla.getNodeValue(field,"SplitAt")
                    splitter = splitter.replace("(space)"," ")
                    part = dla.getNodeValue(field,"Part")
                    val = getSplit(sourceValue,splitter,part)
                elif method == "ConditionalValue":
                    sname = dla.getNodeValue(field,"SourceName")
                    oper = dla.getNodeValue(field,"Oper")
                    iif = dla.getNodeValue(field,"If")
                    if iif != " " and type(iif) == 'str':
                        for name in names:
                            if name in iif:
                                iif = iif.replace(name,"|"+name+"|")
                    tthen = dla.getNodeValue(field,"Then")
                    eelse = dla.getNodeValue(field,"Else")
                    for name in names:
                        if name in eelse:
                            eelse = eelse.replace(name,"|"+name+"|")
                    expression = "|" + tthen + "| " + " if |" + sname + "| " + oper + " |" + iif + "| else " + eelse
                    val = getExpression(row,names,expression)
                elif method == "Expression":
                    expression = dla.getNodeValue(field,method)
                    for name in names:
                        expression = expression.replace(name,"|" + name + "|")
                    val = getExpression(row,names,expression)
                # set field value
                if method != "None" and val != None:
                    newVal = getValue(names,ftypes,lengths,targetName,targetValue,val)
                    row[fnum] = newVal
                else:
                    row[fnum] = val
            try:
                updateCursor.updateRow(row)
            except:
                dla._errCount += 1
                success = False
                err = "Exception caught: unable to update row"
                printRow(row,names)
                dla.showTraceback()
                dla.addError(err)
    except:
        dla._errCount += 1
        success = False
        err = "Exception caught: unable to update dataset"
        if row != None:
            printRow(row,names)
        dla.showTraceback()
        dla.addError(err)

    finally:
        del updateCursor
        dla.cleanupGarbage()
        arcpy.ResetProgressor()

    return success
def exportDataset(xmlDoc,sourceLayer,workspace,targetName,rowLimit):
    result = True
    xmlFields = xmlDoc.getElementsByTagName("Field")
    dla.addMessage("Exporting Layer from " + sourceLayer)
    whereClause = ""
    if rowLimit != None:
        try:
            whereClause = getObjectIdWhereClause(sourceLayer,rowLimit)
        except:
            dla.addMessage("Unable to obtain where clause to Preview " + sourceLayer + ", continuing with all records")
            
    if whereClause != '' and whereClause != ' ':
        dla.addMessage("Where " + str(whereClause))
    sourceName = dla.getSourceName(xmlDoc)
    viewName = sourceName + "_View"
    dla.addMessage(viewName)
    
    targetRef = getSpatialReference(xmlDoc,"Target")
    #sourceRef = getSpatialReference(xmlDoc,"Source")
    
    if targetRef != '':
            
        arcpy.env.workspace = workspace
        view = dla.makeFeatureView(dla.workspace,sourceLayer,viewName,whereClause,xmlFields)
        dla.addMessage("View Created")            
        count = arcpy.GetCount_management(view).getOutput(0)
        dla.addMessage(str(count) + " source rows")

        arcpy.env.overwriteOutput = True
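        # create an intermediate feature class with the source schema but the target spatial reference, then Append loads (and reprojects) the selected rows into it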
        arcpy.CreateFeatureclass_management(workspace,targetName,template=sourceLayer,spatial_reference=targetRef)
        fc = workspace + os.sep + targetName
        arcpy.Append_management(view,fc,schema_type="NO_TEST")
        dla.addMessage(arcpy.GetMessages(2)) # only serious errors
        count = arcpy.GetCount_management(fc).getOutput(0)
        dla.addMessage(str(count) + " source rows exported to " + targetName)
        if str(count) == '0':
            result = False
            dla.addError("Failed to load to " + targetName + ", it is likely that your data falls outside of the target Spatial Reference Extent")
            dla.addMessage("To verify please use the Append tool to load some data to the target dataset")
    return result
Example #32
def exportDataset(xmlDoc, sourceLayer, workspace, targetName, rowLimit):
    result = True
    xmlFields = xmlDoc.getElementsByTagName("Field")
    dla.addMessage("Exporting Layer from " + sourceLayer)
    whereClause = ""
    if rowLimit != None:
        # try:
        whereClause = getObjectIdWhereClause(sourceLayer, rowLimit)
        # except:
        #    dla.addMessage("Unable to obtain where clause to Preview " + sourceLayer + ", continuing with all records")

    if whereClause != "" and whereClause != " ":
        # dla.addMessage("rowLimit " + str(rowLimit))
        dla.addMessage("Where " + str(whereClause))
    sourceName = dla.getSourceName(xmlDoc)
    viewName = sourceName + "_View"
    dla.addMessage(viewName)

    targetRef = getSpatialReference(xmlDoc, "Target")
    # sourceRef = getSpatialReference(xmlDoc,"Source")

    if targetRef != "":

        arcpy.env.workspace = workspace
        view = dla.makeFeatureView(dla.workspace, sourceLayer, viewName, whereClause, xmlFields)
        dla.addMessage("View Created")
        srcCount = arcpy.GetCount_management(view).getOutput(0)
        dla.addMessage(str(srcCount) + " source rows")
        if str(srcCount) == "0":
            result = False
            dla.addError("Failed to extract " + sourceName + ", Nothing to export")
        else:
            arcpy.env.preserveGlobalIds = False  # need to run this way until support added for GlobalIDs
            # dla.addMessage("names: " + sourceName + "|" + targetName)
            arcpy.env.overwriteOutput = True
            try:
                arcpy.CreateFeatureclass_management(
                    workspace, targetName, template=sourceLayer, spatial_reference=targetRef
                )
            except:
                arcpy.AddError(
                    "Unable to create intermediate feature class, exiting: " + workspace + os.sep + targetName
                )
                return False
            fc = workspace + os.sep + targetName
            arcpy.Append_management(view, fc, schema_type="NO_TEST")
            dla.addMessage(arcpy.GetMessages(2))  # only serious errors
            count = arcpy.GetCount_management(fc).getOutput(0)
            dla.addMessage(str(count) + " source rows exported to " + targetName)
            if str(count) == "0":
                result = False
                dla.addError(
                    "Failed to load to "
                    + targetName
                    + ", it is likely that your data falls outside of the target Spatial Reference Extent"
                )
                dla.addMessage("To verify please use the Append tool to load some data to the target dataset")
    return result
Example #33
def calculate(xmlFileName,workspace,name,ignore):

    dla.workspace = workspace    
    success = True
    arcpy.ClearWorkspaceCache_management(dla.workspace)
    xmlDoc = dla.getXmlDoc(xmlFileName)
    dla.addMessage("Field Calculator: " + xmlFileName)
    arcpy.env.Workspace = dla.workspace
    table = dla.getTempTable(name)

    if not arcpy.Exists(table):
        dla.addError("Feature Class " + table + " does not exist, exiting")
        arcpy.SetParameter(SUCCESS, False)
        return
    if not arcpy.TestSchemaLock(table):
        dla.addError("Unable to obtain a schema lock for " + table + ", exiting")
        arcpy.SetParameter(SUCCESS, False)
        return -1
    
    desc = arcpy.Describe(table)
    fields = dla.getXmlElements(xmlFileName,"Field")
    sourceFields = dla.getXmlElements(xmlFileName,"SourceField")
    targetFields = dla.getXmlElements(xmlFileName,"TargetField")
    attrs = [f.name for f in arcpy.ListFields(table)]

    for field in fields:
        arcpy.env.Workspace = dla.workspace
        targetName = dla.getNodeValue(field,"TargetName")
        sourceName = dla.getNodeValue(field,"SourceName")
            
        ftype = "String"
        length = "50"
        for target in targetFields:
            nm = target.getAttributeNode("Name").nodeValue
            if  nm == targetName:
                ftype = target.getAttributeNode("Type").nodeValue
                length = target.getAttributeNode("Length").nodeValue
        # uppercase compare, later need to check for orig/upper name for calc
        #ups = [nm.upper() for nm in attrs]
        dla.addDlaField(table,targetName,field,attrs,ftype,length)

    allFields = sourceFields + targetFields
    desc = arcpy.Describe(table)
    layerNames = []
    names = []
    ftypes = []
    lengths = []
    ignore = ['FID','OBJECTID','GLOBALID','SHAPE','SHAPE_AREA','SHAPE_LENGTH','SHAPE_LEN','STLENGTH()','STAREA()','RASTER']
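    # also add the dataset's own system field names (OID, shape, length/area, global ID, raster) from the Describe object to the ignore list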
    for name in ['OIDFieldName','ShapeFieldName','LengthFieldName','AreaFieldName','GlobalIDFieldName','RasterFieldName']:
        try:
            val = eval("desc." + name)
            val = val[val.rfind('.')+1:] 
            ignore.append(val.upper())
        except:
            pass

    for field in desc.fields:
        if field.name.upper() not in ignore:
            layerNames.append(field.name.upper())

    for field in allFields:
        nm = field.getAttributeNode("Name").nodeValue
        if nm != dla.noneName and nm.upper() not in ignore and nm.upper() in layerNames:
            try:
                names.index(nm)
            except:
                names.append(nm)
                typ = field.getAttributeNode("Type").nodeValue
                leng = field.getAttributeNode("Length").nodeValue      
                ftypes.append(typ)
                lengths.append(leng)

    retVal = setFieldValues(table,fields,names,ftypes,lengths)
    if retVal == False:
        success = False
    arcpy.ClearWorkspaceCache_management(dla.workspace)
    dla.cleanupGarbage()

    arcpy.ResetProgressor()
    if ignore == True:
        success = True
    return success
Example #34
def addFeatures(sourceLayer, targetUrl, expr):
    # add features using _chunkSize
    retval = False
    error = False
    # add section
    try:
        arcpy.SetProgressor("default", "Adding Features")
        arcpy.SetProgressorLabel("Adding Features")
        featurejs = featureclass_to_json(sourceLayer)
        url = targetUrl + '/addFeatures'
        numFeat = len(featurejs['features'])
        if numFeat == 0:
            dla.addMessage("0 Features to Add, exiting")
            return True  # nothing to add is OK
        if numFeat > _chunkSize:
            chunk = _chunkSize
        else:
            chunk = numFeat
        featuresProcessed = 0
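        # post the features in chunks of _chunkSize; rollbackonfailure makes each addFeatures request all-or-nothing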
        while featuresProcessed < numFeat and error == False:
            next = featuresProcessed + chunk
            features = featurejs['features'][featuresProcessed:next]
            msg = "Adding features " + str(featuresProcessed) + ":" + str(next)
            dla.addMessage(msg)
            arcpy.SetProgressorLabel(msg)
            token = dla.getSigninToken()
            params = {
                'rollbackonfailure': 'true',
                'f': 'json',
                'token': token,
                'features': json.dumps(features)
            }
            result = dla.sendRequest(url, params)
            try:
                if result['error'] != None:
                    retval = False
                    dla.addMessage("Add features to Feature Service failed")
                    dla.addMessage(json.dumps(result))
                    error = True
            except:
                try:
                    lenAdded = len(result['addResults'])
                    msg = str(lenAdded) + " features added, " + str(
                        featuresProcessed + chunk) + "/" + str(numFeat)
                    print(msg)
                    dla.addMessage(msg)
                    retval = True
                except:
                    retval = False
                    dla.addMessage("Add features to Feature Service failed")
                    dla.showTraceback()
                    dla.addError(json.dumps(result))
                    error = True
            featuresProcessed += chunk
    except:
        retval = False
        dla.addMessage("Add features to Feature Service failed")
        dla.showTraceback()
        error = True
        pass

    return retval
def writeDocument(sourceDataset, targetDataset, xmlFileName):

    desc = arcpy.Describe(sourceDataset)
    descT = arcpy.Describe(targetDataset)
    sourcePath = getLayerPath(desc)
    targetPath = getLayerPath(descT)

    ## Added May2016. warn user if capabilities are not correct, exit if not a valid layer
    if not dla.checkServiceCapabilities(sourcePath, False):
        dla.addMessage(
            sourceDataset +
            ' Does not appear to be a feature service layer, exiting. Check that you selected a layer not a service'
        )
        return False
    if not dla.checkServiceCapabilities(targetPath, False):
        dla.addMessage(
            targetDataset +
            ' Does not appear to be a feature service layer, exiting. Check that you selected a layer not a service'
        )
        return False

    xmlDoc = Document()
    root = xmlDoc.createElement('SourceTargetMatrix')
    xmlDoc.appendChild(root)
    root.setAttribute("version", '1.1')
    root.setAttribute("xmlns:esri", 'http://www.esri.com')

    dataset = xmlDoc.createElement("Datasets")
    root.appendChild(dataset)
    setSourceTarget(dataset, xmlDoc, "Source", sourcePath)
    setSourceTarget(dataset, xmlDoc, "Target", targetPath)

    setSpatialReference(dataset, xmlDoc, desc, "Source")
    setSpatialReference(dataset, xmlDoc, descT, "Target")

    setSourceTarget(dataset, xmlDoc, "ReplaceBy", "")

    fieldroot = xmlDoc.createElement("Fields")
    root.appendChild(fieldroot)

    fields = getFields(descT, targetDataset)
    sourceFields = getFields(desc, sourceDataset)
    sourceNames = [
        field.name[field.name.rfind(".") + 1:] for field in sourceFields
    ]
    upperNames = [nm.upper() for nm in sourceNames]

    #try:
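    # match each target field to a source field by its unqualified name (text after the last '.'); upperNames supports case-insensitive comparison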
    for field in fields:

        fNode = xmlDoc.createElement("Field")
        fieldroot.appendChild(fNode)
        fieldName = field.name[field.name.rfind(".") + 1:]
        matchSourceFields(xmlDoc, fNode, field, fieldName, sourceNames,
                          upperNames)

    # write the source field values
    setSourceFields(root, xmlDoc, sourceFields)
    setTargetFields(root, xmlDoc, fields)
    # Should add a template section for value maps, maybe write domains...
    # could try to preset field mapping and domain mapping...

    # add some data to the document
    writeDataSample(xmlDoc, root, sourceNames, sourceDataset, 10)
    # write it out
    xmlDoc.writexml(open(xmlFileName, 'w'),
                    indent="  ",
                    addindent="  ",
                    newl='\n')
    xmlDoc.unlink()
Example #37
def checkServiceCapabilities(pth, checklist):
    res = False
    if pth == None:
        dla.addMessage('Error: No path available for layer')
        return False
    if checkLayerIsService(pth):
        url = pth
        if isFeatureLayerUrl(url):
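            # an active sign-in is required; the portal token is used to query the service's REST properties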
            data = arcpy.GetSigninToken()
            token = data['token']
            name = getServiceName(url)
            if name != None:
                # checklist is a list like: ['Create','Delete']
                res = hasCapabilities(url, token, checklist)
                if res != True:
                    dla.addMessage('WARNING: ' + name + ' does not have ' +
                                   '+'.join(checklist) + ' privileges')
                    dla.addMessage('Verify the service properties for: ' + url)
                    dla.addMessage(
                        'This tool will not run until this is addressed')
                return res
            else:
                dla.addMessage("Unable to retrieve Service name from the url")
                return res
        else:
            dla.addMessage(
                pth +
                ' Does not appear to be a feature service layer, exiting. Check that you selected a layer not a service'
            )
            return False
    else:
        return None  # if it's not a service return None
Example #38
def addRows(source, targetUrl, expr):
    # add rows using _chunkSize
    retval = False
    error = False
    # add section
    try:
        arcpy.SetProgressor("default", "Adding Rows")
        arcpy.SetProgressorLabel("Adding Rows")
        rowjs = rowsToJson(source)
        url = targetUrl + '/addFeatures'
        try:
            numFeat = len(rowjs['features'])
        except:
            numFeat = 0
        if numFeat == 0:
            dla.addMessage("0 Rows to Add, exiting")
            return True  # nothing to add is OK
        if numFeat > _chunkSize:
            chunk = _chunkSize
        else:
            chunk = numFeat
        rowsProcessed = 0
        while rowsProcessed < numFeat and error == False:
            next = rowsProcessed + chunk
            rows = rowjs['features'][rowsProcessed:next]
            msg = "Adding rows " + str(rowsProcessed) + ":" + str(next)
            dla.addMessage(msg)
            arcpy.SetProgressorLabel(msg)
            token = getSigninToken()
            params = {
                'rollbackonfailure': 'true',
                'f': 'json',
                'token': token,
                'features': json.dumps(rows)
            }
            result = sendRequest(url, params)
            try:
                if result['error'] != None:
                    retval = False
                    dla.addMessage("Add rows to Service failed")
                    dla.addMessage(json.dumps(result))
                    error = True
            except:
                try:
                    lenAdded = len(result['addResults'])
                    total = rowsProcessed + chunk
                    if total > numFeat:
                        total = numFeat
                    msg = str(lenAdded) + " rows added, " + str(
                        total) + "/" + str(numFeat)
                    print(msg)
                    dla.addMessage(msg)
                    retval = True
                except:
                    retval = False
                    dla.addMessage(
                        "Add rows to Service failed. Unfortunately you will need to re-run this tool."
                    )
                    #dla.showTraceback()
                    #dla.addError(json.dumps(result))
                    error = True
            rowsProcessed += chunk
    except:
        retval = False
        dla.addMessage("Add rows to Service failed")
        dla.showTraceback()
        error = True
        pass

    return retval
Example #39
def setProgressor(i,numFeat):
    if i % 100 == 0:
        dla.addMessage("Feature " + str(i) + " processed")
    if i % getProgressUpdate(numFeat) == 0:
        arcpy.SetProgressorPosition(i)
def setFieldValues(table, fields, names, types, lengths):
    # from source xml file match old values to new values to prepare for append to target geodatabase
    success = False
    row = None
    try:
        updateCursor = arcpy.da.UpdateCursor(table, names)

        result = arcpy.GetCount_management(table)
        numFeat = int(result.getOutput(0))
        dla.addMessage(table + ", " + str(numFeat) + " features")
        i = 0
        arcpy.SetProgressor("Step", "Calculating " + table + "...", 0, numFeat,
                            getProgressUpdate(numFeat))

        for row in updateCursor:
            success = True
            if dla._errCount > dla.maxErrorCount:
                #dla.addMessage("Exceeded max number of errors in dla.maxErrorCount: " + str(dla.maxErrorCount))
                dla.addError(
                    "Exceeded max number of errors in dla.maxErrorCount: " +
                    str(dla.maxErrorCount))
                return False
            if i > dla.maxrows:
                #dla.addMessage("Exceeded max number of rows supported in dla.maxrows: " + str(dla.maxrows))
                dla.addError(
                    "Exceeded max number of rows supported in dla.maxrows: " +
                    str(dla.maxrows))
                return True
            i = i + 1
            setProgressor(i, numFeat)
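            # apply the configured method for each target field to derive this row's value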
            for field in fields:
                method = "None"
                sourceName = dla.getNodeValue(field, "SourceName")
                targetName = dla.getNodeValue(field, "TargetName")

                targetValue = getTargetValue(row, field, names, sourceName,
                                             targetName)
                sourceValue = getSourceValue(row, names, sourceName,
                                             targetName)
                method = dla.getNodeValue(field, "Method").replace(" ", "")
                if method == "None" or (method == "Copy"
                                        and sourceName == '(None)'):
                    method = "None"
                    val = None
                elif method == "Copy":
                    val = sourceValue
                elif method == "DefaultValue":
                    val = dla.getNodeValue(field, "DefaultValue")
                elif method == "SetValue":
                    val = dla.getNodeValue(field, "SetValue")
                elif method == "ValueMap":
                    val = getValueMap(row, names, sourceValue, field)
                elif method == "ChangeCase":
                    case = dla.getNodeValue(field, method)
                    expression = getChangeCase(sourceValue, case)
                    val = getExpression(row, names, expression)
                elif method == "Concatenate":
                    val = getConcatenate(row, names, field)
                elif method == "Left":
                    chars = dla.getNodeValue(field, "Left")
                    val = getSubstring(sourceValue, "0", chars)
                elif method == "Right":
                    chars = dla.getNodeValue(field, "Right")
                    val = getSubstring(sourceValue,
                                       len(str(sourceValue)) - int(chars),
                                       len(str(sourceValue)))
Example #41
def calculate(xmlFileName, workspace, name, ignore):

    dla.workspace = workspace
    success = True
    arcpy.ClearWorkspaceCache_management(dla.workspace)
    xmlDoc = dla.getXmlDoc(xmlFileName)
    dla.addMessage("Field Calculator: " + xmlFileName)
    arcpy.env.Workspace = dla.workspace
    table = dla.getTempTable(name)

    if not arcpy.Exists(table):
        dla.addError("Feature Class " + table + " does not exist, exiting")
        arcpy.SetParameter(SUCCESS, False)
        return
    if not arcpy.TestSchemaLock(table):
        dla.addError("Unable to obtain a schema lock for " + table +
                     ", exiting")
        arcpy.SetParameter(SUCCESS, False)
        return -1

    desc = arcpy.Describe(table)
    fields = dla.getXmlElements(xmlFileName, "Field")
    sourceFields = dla.getXmlElements(xmlFileName, "SourceField")
    targetFields = dla.getXmlElements(xmlFileName, "TargetField")
    attrs = [f.name for f in arcpy.ListFields(table)]
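    # CaseInsensitiveDict is assumed to be available in this module (for example requests.structures.CaseInsensitiveDict or a local helper); keyed by target field name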
    target_values = CaseInsensitiveDict()

    #Fix read into dict, using NM as key
    # at this point just getting the list of all target field names/types/lengths
    for target in targetFields:
        nm = target.getAttributeNode("Name").nodeValue
        target_values[nm] = dict(
            ftype=target.getAttributeNode("Type").nodeValue,
            flength=target.getAttributeNode("Length").nodeValue)

    for field in fields:
        arcpy.env.Workspace = dla.workspace
        targetName = dla.getNodeValue(field, "TargetName")
        sourceName = dla.getNodeValue(field, "SourceName")

        ftype = "String"
        flength = "50"
        if targetName in target_values:
            ftype = target_values[targetName]['ftype']
            flength = target_values[targetName]['flength']

        # make sure the field exists in the field calculator dataset, this will include all source and target fields.
        retcode = dla.addDlaField(table, targetName, field, attrs, ftype,
                                  flength)
        if retcode == False:
            addError("Unable to add field " + targetName +
                     " to database to calculate values, exiting")

    allFields = sourceFields + targetFields  # this should be the same as the dataset fields at this point
    desc = arcpy.Describe(table)
    layerNames = []
    names = []
    ftypes = []
    lengths = []
    ignore = dla.getIgnoreFieldNames(
        desc
    )  # gdb system fields that will be handled automatically and cannot be calculated
    ignore = [nm.upper() for nm in ignore]

    for field in desc.fields:  # get the uppercase names for everything that exists in the dataset
        if field.name.upper() not in ignore:
            layerNames.append(field.name.upper())

    for field in allFields:  # loop through everything that might exist
        nm = field.getAttributeNode("Name").nodeValue.replace(
            '.', '_')  #  handle joins and remaining . in field names
        # skip the None field, gdb system fields, and names not present in the dataset
        if nm != dla._noneFieldName and nm.upper() not in ignore and nm.upper() in layerNames:
            idx = dla.getFieldIndexList(names, nm)
            if idx is None:  # if the name is not already in the list
                names.append(nm)
                typ = field.getAttributeNode("Type").nodeValue
                leng = field.getAttributeNode("Length").nodeValue
                ftypes.append(typ)
                lengths.append(leng)

            # FIXME: Steve, was not sure why you were capturing an error here, and then doing something
            # from Steve - was looking for names that actually exist in the dataset and are not gdb system fields. No guarantee Xml matches dataset
            #try:
            #names.index(nm)
            #except:
            #names.append(nm)
            #typ = field.getAttributeNode("Type").nodeValue
            #leng = field.getAttributeNode("Length").nodeValue
            #ftypes.append(typ)
            #lengths.append(leng)
    retVal = setFieldValues(table, fields, names, ftypes, lengths)
    if retVal == False:
        success = False
    arcpy.ClearWorkspaceCache_management(dla.workspace)
    dla.cleanupGarbage()

    arcpy.ResetProgressor()
    if ignore == True:
        success = True
    return success
Example #42
def deleteFeatures(sourceLayer, targetUrl, expr):
    # delete features using chunks of _chunkSize
    retval = False
    error = False
    # delete section
    ids = getOIDs(targetUrl, expr)
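    # collect the ObjectIDs of features matching the optional where clause; these drive the chunked deletes below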
    try:
        lenDeleted = 100
        #Chunk deletes using chunk size at a time
        featuresProcessed = 0
        numFeat = len(ids)
        if numFeat == 0:
            dla.addMessage("0 Features to Delete, exiting")
            return True  # nothing to delete is OK
        if numFeat > _chunkSize:
            chunk = _chunkSize
        else:
            chunk = numFeat
        arcpy.SetProgressor("default", "Deleting Features")
        while featuresProcessed < numFeat and error == False:
            #Chunk deletes using chunk size at a time
            next = featuresProcessed + chunk
            msg = "Deleting features " + str(featuresProcessed) + ":" + str(
                next)
            dla.addMessage(msg)
            arcpy.SetProgressorLabel(msg)
            oids = ",".join(str(e) for e in ids[featuresProcessed:next])
            url = targetUrl + '/deleteFeatures'
            token = dla.getSigninToken()
            params = {'f': 'pjson', 'objectIds': oids, 'token': token}
            result = dla.sendRequest(url, params)
            try:
                if result['error'] != None:
                    retval = False
                    dla.addMessage(
                        "Delete features from Feature Service failed")
                    dla.addMessage(json.dumps(result))
                    error = True
            except:
                try:
                    lenDeleted = len(result['deleteResults'])
                    msg = str(lenDeleted) + " features deleted, " + str(
                        featuresProcessed + chunk) + "/" + str(numFeat)
                    print(msg)
                    dla.addMessage(msg)
                    retval = True
                except:
                    retval = False
                    error = True
                    dla.showTraceback()
                    dla.addMessage(
                        "Delete features from Feature Service failed")
                    dla.addError(json.dumps(result))
            featuresProcessed += chunk
    except:
        retval = False
        error = True
        dla.showTraceback()
        dla.addMessage("Delete features from Feature Service failed")
        pass

    return retval
def setProgressor(i, numFeat):
    if i % 100 == 0:
        dla.addMessage("Feature " + str(i) + " processed")
    if i % getProgressUpdate(numFeat) == 0:
        arcpy.SetProgressorPosition(i)
Example #44
def publish(xmlFileNames):
    # function called from main or from another script, performs the data update processing
    global sourceLayer, targetLayer, _success
    dla._errCount = 0

    arcpy.SetProgressor("default", "Data Assistant")
    arcpy.SetProgressorLabel("Data Assistant")
    xmlFiles = xmlFileNames.split(";")
    for xmlFile in xmlFiles:  # multi value parameter, loop for each file
        dla.addMessage("Configuration file: " + xmlFile)
        xmlDoc = dla.getXmlDoc(xmlFile)  # parse the xml document
        if xmlDoc == None:
            return
        svceS = False
        svceT = False
        if sourceLayer == "" or sourceLayer == None:
            sourceLayer = dla.getNodeValue(xmlDoc, "Source")
            svceS = dla.checkLayerIsService(sourceLayer)
        if targetLayer == "" or targetLayer == None:
            targetLayer = dla.getNodeValue(xmlDoc, "Target")
            svceT = dla.checkLayerIsService(targetLayer)

        dla.addMessage(targetLayer)
        ## Added May2016. warn user if capabilities are not correct, exit if not a valid layer
        if not dla.checkServiceCapabilities(sourceLayer, True):
            return False
        if not dla.checkServiceCapabilities(targetLayer, True):
            return False

        if svceS == True or svceT == True:
            token = dla.getSigninToken(
            )  # when signed in get the token and use this. Will be requested many times during the publish
            if token == None:
                dla.addError(
                    "User must be signed in for this tool to work with services"
                )
                return

        expr = getWhereClause(xmlDoc)
        if useReplaceSettings == True and (expr == '' or expr == None):
            dla.addError(
                "There must be an expression for replacing by field value, current value = "
                + str(expr))
            return False

        dla.setWorkspace()
        targetName = dla.getTargetName(xmlDoc)
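        # extract the source layer to a staging feature class in the local workspace, calculate field values, then publish to the target layer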
        res = dlaExtractLayerToGDB.extract(xmlFile, None, dla.workspace,
                                           sourceLayer, targetName)
        if res != True:
            table = dla.getTempTable(targetName)
            msg = "Unable to export data, there is a lock on existing datasets or another unknown error"
            if arcpy.TestSchemaLock(table) != True:
                msg = "Unable to export data, there is a lock on the intermediate feature class: " + table
            dla.addError(msg)
            print(msg)
            return
        else:
            res = dlaFieldCalculator.calculate(xmlFile, dla.workspace,
                                               targetName, False)
            if res == True:
                dlaTable = dla.getTempTable(targetName)
                res = doPublish(xmlDoc, dlaTable, targetLayer)

        arcpy.ResetProgressor()
        sourceLayer = None  # set source and target back to None for multiple file processing
        targetLayer = None
        if res == False:
            err = "Data Assistant Update Failed, see messages for details"
            dla.addError(err)
            print(err)
Example #45
def stage(xmlFileNames):
    global source, target, rowLimit

    dla.setWorkspace()
    dla._errCount = 0
    outlayers = []

    for xmlFileName in xmlFileNames.split(';'):
        xmlFileName = dla.getXmlDocName(xmlFileName)
        xmlDoc = dla.getXmlDoc(xmlFileName)
        prj = dla.setProject(xmlFileName, dla.getNodeValue(xmlDoc, "Project"))
        if prj == None:
            dla.addError(
                "Unable to open your project, please ensure it is in the same folder as your current project or your Config file"
            )

        if rowLimit == "" or rowLimit == None:
            rowLimit = None
        if source == "" or source == None:
            source = dla.getDatasetPath(xmlDoc, "Source")
        if target == "" or target == None:
            target = dla.getDatasetPath(xmlDoc, "Target")

        if dla.isTable(source) or dla.isTable(target):
            datasetType = 'Table'
        else:
            datasetType = 'FeatureClass'

        targetName = dla.getStagingName(source, target)
        targetDS = os.path.join(dla.workspace, targetName)
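        # extract the source rows into the staging dataset, then run the field calculator on the result before adding it to the map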

        res = dlaExtractLayerToGDB.extract(xmlFileName, rowLimit,
                                           dla.workspace, source, targetDS,
                                           datasetType)
        if res == True:
            res = dlaFieldCalculator.calculate(xmlFileName, dla.workspace,
                                               targetName, False)

            if res == True:
                arcpy.env.addOutputsToMap = True
                layer = targetName
                layertmp = targetName + "tmp"
                if arcpy.Exists(layertmp):
                    arcpy.Delete_management(layertmp)
                if dla.isTable(targetDS):
                    arcpy.MakeTableView_management(targetDS, layertmp)
                else:
                    arcpy.MakeFeatureLayer_management(targetDS, layertmp)
                fieldInfo = dla.getLayerVisibility(layertmp, xmlFileName)
                if dla.isTable(targetDS):
                    arcpy.MakeTableView_management(targetDS, layer, None,
                                                   dla.workspace, fieldInfo)
                else:
                    arcpy.MakeFeatureLayer_management(targetDS, layer, None,
                                                      dla.workspace, fieldInfo)
                # should make only the target fields visible
                outlayers.append(layer)
                ### *** need to insert tag in xml file...
                dla.insertStagingElement(xmlDoc)
                try:
                    xmlDoc.writexml(open(xmlFileName, 'wt', encoding='utf-8'))
                    dla.addMessage('Staging element written to config file')
                except:
                    dla.addMessage("Unable to write data to xml file")
                xmlDoc.unlink()
        else:
            dla.addError("Failed to Extract data")
            print("Failed to Extract data")
    if outlayers != []:
        arcpy.SetParameter(_derived, ";".join(outlayers))
    dla.writeFinalMessage("Data Assistant - Stage")