Example #1
def preview(xmlFileName):
    global sourceLayer,targetLayer,rowLimit

    dla.setWorkspace()
    dla._errCount = 0

    xmlDoc = dla.getXmlDoc(xmlFileName)
    #arcpy.AddMessage("rowLimit = " + str(rowLimit) )
    if rowLimit == "" or rowLimit == None:
        rowLimit = 100
    if sourceLayer == "" or sourceLayer == None:
        sourceLayer = dla.getNodeValue(xmlDoc,"Source")
    if targetLayer == "" or targetLayer == None:
        targetLayer = dla.getNodeValue(xmlDoc,"Target")
    # always build the timestamped intermediate dataset name, even when a target layer was supplied
    dte = datetime.datetime.now().strftime("%Y%m%d%H%M")
    targetName = dla.getTargetName(xmlDoc) + dte
    targetFC = os.path.join(dla.workspace,targetName)
    res = dlaExtractLayerToGDB.extract(xmlFileName,rowLimit,dla.workspace,sourceLayer,targetFC)
    if res == True:
        res = dlaFieldCalculator.calculate(xmlFileName,dla.workspace,targetName,False)
        if res == True:
            arcpy.env.addOutputsToMap = True
            layer = targetName
            layertmp = targetName + "tmp"
            if arcpy.Exists(layertmp):
                arcpy.Delete_management(layertmp)               
            arcpy.MakeFeatureLayer_management(targetFC,layertmp)
            fieldInfo = dla.getLayerVisibility(layertmp,xmlFileName)
            arcpy.MakeFeatureLayer_management(targetFC,layer,None,dla.workspace,fieldInfo)
            # should make only the target fields visible
            arcpy.SetParameter(_success,layer)
    else:
        dla.addError("Failed to Extract data")
        print("Failed to Extract data")
    dla.writeFinalMessage("Data Assistant - Preview")
def main(argv = None):
    xmlDoc = dla.getXmlDoc(xmlFileName)
    targetName = dla.getTargetName(xmlDoc)
    success = calculate(xmlFileName,dla.workspace,targetName,False)
    if success == False:
        dla.addError("Errors occurred during field calculation")
    arcpy.SetParameter(SUCCESS, success)
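Both preview() and main() rely on module-level globals (xmlFileName, rowLimit, sourceLayer, targetLayer) that are set outside the snippet. A minimal sketch of how they are typically populated when the script runs as an ArcGIS script tool; the parameter order and names here are assumptions for illustration, not the tool's actual signature.

import arcpy

if __name__ == "__main__":
    # hypothetical parameter order -- match it to the actual script tool definition
    xmlFileName = arcpy.GetParameterAsText(0)   # Data Assistant configuration (.xml)
    rowLimit = arcpy.GetParameterAsText(1)      # optional row limit for the preview
    sourceLayer = arcpy.GetParameterAsText(2)   # optional override of the Source dataset
    targetLayer = arcpy.GetParameterAsText(3)   # optional override of the Target dataset
    preview(xmlFileName)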
Example #3
def preview(xmlFileName):
    global source, target, rowLimit

    dla.setWorkspace()
    dla._errCount = 0

    xmlFileName = dla.getXmlDocName(xmlFileName)
    xmlDoc = dla.getXmlDoc(xmlFileName)
    #arcpy.AddMessage("rowLimit = " + str(rowLimit) )
    if rowLimit == "" or rowLimit == None:
        rowLimit = 100

    prj = dla.setProject(xmlFileName, dla.getNodeValue(xmlDoc, "Project"))
    if prj == None:
        dla.addError(
            "Unable to open your project, please ensure it is in the same folder as your current project or your Config file"
        )
        return False

    if source == "" or source == None:
        source = dla.getDatasetPath(xmlDoc, "Source")
    if target == "" or target == None:
        target = dla.getDatasetPath(xmlDoc, "Target")

    if dla.isTable(source) or dla.isTable(target):
        datasetType = 'Table'
    else:
        datasetType = 'FeatureClass'
    dte = datetime.datetime.now().strftime("%Y%m%d%H%M")
    targetName = dla.getDatasetName(target) + dte
    targetDS = os.path.join(dla.workspace, targetName)
    res = dlaExtractLayerToGDB.extract(xmlFileName, rowLimit, dla.workspace,
                                       source, targetDS, datasetType)
    if res == True:
        res = dlaFieldCalculator.calculate(xmlFileName, dla.workspace,
                                           targetName, False)

        if res == True:
            arcpy.env.addOutputsToMap = True
            layer = targetName
            layertmp = targetName + "tmp"
            if arcpy.Exists(layertmp):
                arcpy.Delete_management(layertmp)
            if dla.isTable(targetDS):
                arcpy.MakeTableView_management(targetDS, layertmp)
            else:
                arcpy.MakeFeatureLayer_management(targetDS, layertmp)
            fieldInfo = dla.getLayerVisibility(layertmp, xmlFileName)
            if dla.isTable(targetDS):
                arcpy.MakeTableView_management(targetDS, layer, None,
                                               dla.workspace, fieldInfo)
            else:
                arcpy.MakeFeatureLayer_management(targetDS, layer, None,
                                                  dla.workspace, fieldInfo)
            # should make only the target fields visible
            arcpy.SetParameter(_success, layer)
    else:
        dla.addError("Failed to Extract data")
        print("Failed to Extract data")
    dla.writeFinalMessage("Data Assistant - Preview")
def main(argv=None):
    xmlDoc = dla.getXmlDoc(xmlFileName)
    targetName = dla.getTargetName(xmlDoc)
    success = calculate(xmlFileName, dla.workspace, targetName, False)
    if success == False:
        dla.addError("Errors occurred during field calculation")
    arcpy.SetParameter(SUCCESS, success)
def extract(xmlFileName, rowLimit, workspace, sourceLayer, targetFC):

    xmlDoc = dla.getXmlDoc(xmlFileName)
    if workspace == "" or workspace == "#" or workspace == None:
        dla.workspace = arcpy.env.scratchGDB
    else:
        dla.workspace = workspace
    fields = dla.getFields(xmlFileName)
    success = True
    name = ''
    try:
        if not arcpy.Exists(dla.workspace):
            dla.addMessage(dla.workspace +
                           " does not exist, attempting to create")
            dla.createGeodatabase()
        if len(fields) > 0:
            arcpy.SetProgressor("step", "Importing Layer...", 0, 1, 1)

            if sourceLayer == '' or sourceLayer == '#':
                source = dla.getNodeValue(xmlDoc, "Datasets/Source")
            else:
                source = sourceLayer
            if targetFC == '' or targetFC == '#':
                targetName = dla.getTargetName(xmlDoc)
            else:
                targetName = targetFC[targetFC.rfind(os.sep) + 1:]

            sourceName = dla.getSourceName(xmlDoc)
            arcpy.SetProgressorLabel("Loading " + sourceName + " to " +
                                     targetName + "...")
            #if not arcpy.Exists(sourceLayer):
            #    dla.addError("Layer " + sourceLayer + " does not exist, exiting")
            #    return

            # pass the resolved source (read from the xml when sourceLayer was '' or '#'), not the raw argument
            retVal = exportDataset(xmlDoc, source, dla.workspace,
                                   targetName, rowLimit)
            if retVal == False:
                success = False

        arcpy.SetProgressorPosition()
    except:
        dla.addError("A Fatal Error occurred")
        dla.showTraceback()
        success = False
    finally:
        arcpy.ResetProgressor()
        #arcpy.RefreshCatalog(dla.workspace)
        arcpy.ClearWorkspaceCache_management(dla.workspace)

    return success
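extract() can also be called directly from Python instead of through a script tool. A minimal sketch, assuming a hypothetical config path and the scratch geodatabase as the staging workspace; passing "#" for the source and target tells the function to resolve both from the configuration file, as the code above shows.

import arcpy

ok = extract(r"C:\configs\ParcelsLoad.xml",  # hypothetical config file
             100,                            # copy at most 100 rows
             arcpy.env.scratchGDB,           # staging workspace
             "#",                            # "#" -> resolve the Source from the xml
             "#")                            # "#" -> resolve the Target name from the xml
if not ok:
    print("Extract reported errors; check the geoprocessing messages")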
def main(argv = None):
    global sourceLayer,targetLayer

    xmlDoc = dla.getXmlDoc(xmlFileName)
    if dla.workspace == "" or dla.workspace == "#" or dla.workspace == None:  
        dla.workspace = arcpy.env.scratchGDB
    if sourceLayer == "" or sourceLayer == None:
        sourceLayer = dla.getNodeValue(xmlDoc,"Source")
    if targetLayer == "" or targetLayer == None:
        targetLayer = dla.getNodeValue(xmlDoc,"Target")
    success = extract(xmlFileName,rowLimit,dla.workspace,sourceLayer,targetLayer)
    if success == False:
        dla.addError("Errors occurred during process")
    arcpy.SetParameter(SUCCESS, success)
def extract(xmlFileName,rowLimit,workspace,sourceLayer,targetFC):          

    xmlDoc = dla.getXmlDoc(xmlFileName)
    if workspace == "" or workspace == "#" or workspace == None:  
        dla.workspace = arcpy.env.scratchGDB
    else:
        dla.workspace = workspace
    fields = dla.getFields(xmlFileName)
    success = True
    name = ''
    try:
        if not arcpy.Exists(dla.workspace):
            dla.addMessage(dla.workspace + " does not exist, attempting to create")
            dla.createGeodatabase()
        if len(fields) > 0:
            arcpy.SetProgressor("step", "Importing Layer...",0,1,1)

            if sourceLayer == '' or sourceLayer == '#':                
                source = dla.getNodeValue(xmlDoc,"Datasets/Source")
            else:
                source = sourceLayer
            if targetFC == '' or targetFC == '#':
                targetName = dla.getTargetName(xmlDoc)
            else:
                targetName = targetFC[targetFC.rfind(os.sep)+1:]

            sourceName = dla.getSourceName(xmlDoc)
            arcpy.SetProgressorLabel("Loading " + sourceName + " to " + targetName +"...")
            #if not arcpy.Exists(sourceLayer):
            #    dla.addError("Layer " + sourceLayer + " does not exist, exiting")
            #    return
            
            # pass the resolved source (read from the xml when sourceLayer was '' or '#'), not the raw argument
            retVal = exportDataset(xmlDoc,source,dla.workspace,targetName,rowLimit)
            if retVal == False:
                success = False

        arcpy.SetProgressorPosition()
    except:
        dla.addError("A Fatal Error occurred")
        dla.showTraceback()
        success = False
    finally:
        arcpy.ResetProgressor()
        #arcpy.RefreshCatalog(dla.workspace)
        arcpy.ClearWorkspaceCache_management(dla.workspace)

    return success
def extract(xmlFileName, rowLimit, workspace, source, target, datasetType):

    xmlDoc = dla.getXmlDoc(xmlFileName)
    if workspace == "" or workspace == "#" or workspace == None:
        dla.workspace = dla.setWorkspace()
    else:
        dla.workspace = workspace
    fields = dla.getFields(xmlFileName)
    success = True
    name = ''
    try:
        if not arcpy.Exists(dla.workspace):
            dla.addMessage(dla.workspace +
                           " does not exist, attempting to create")
            dla.createGeodatabase()
        if len(fields) > 0:
            arcpy.SetProgressor("step", "Importing Layer...", 0, 1, 1)

            targetName = dla.getDatasetName(target)
            sourceName = dla.getDatasetName(source)
            arcpy.SetProgressorLabel("Loading " + sourceName + " to " +
                                     targetName + "...")

            if not arcpy.Exists(source):
                dla.addError("Layer " + source + " does not exist, exiting")
                return False  # keep a consistent boolean return value

            retVal = exportDataset(xmlDoc, source, dla.workspace, targetName,
                                   rowLimit, datasetType)
            if retVal == False:
                success = False

        arcpy.SetProgressorPosition()
    except:
        dla.addError("A Fatal Error occurred")
        dla.showTraceback()
        success = False
    finally:
        arcpy.ResetProgressor()
        #arcpy.RefreshCatalog(dla.workspace)
        arcpy.ClearWorkspaceCache_management(dla.workspace)

    return success
def main(argv=None):
    global source, target

    xmlDoc = dla.getXmlDoc(xmlFileName)
    if dla.workspace == "" or dla.workspace == "#" or dla.workspace == None:
        dla.workspace = arcpy.env.scratchGDB
    if source == "" or source == None:
        source = dla.getDatasetPath(xmlDoc, "Source")
    if target == "" or target == None:
        target = dla.getDatasetPath(xmlDoc, "Target")
    if dla.isTable(source) or dla.isTable(target):
        datasetType = 'Table'
    else:
        datasetType = 'FeatureClass'

    success = extract(xmlFileName, rowLimit, dla.workspace, source, target,
                      datasetType)
    if success == False:
        dla.addError("Errors occurred during process")
    arcpy.SetParameter(SUCCESS, success)
def preview(xmlFileName):
    global sourceLayer, targetLayer, rowLimit

    dla.setWorkspace()
    dla._errorCount = 0

    xmlDoc = dla.getXmlDoc(xmlFileName)
    if rowLimit == "" or rowLimit == None:
        rowLimit = 100
    if sourceLayer == "" or sourceLayer == None:
        sourceLayer = dla.getNodeValue(xmlDoc, "Source")
    if targetLayer == "" or targetLayer == None:
        targetLayer = dla.getNodeValue(xmlDoc, "Target")
    # always build the timestamped intermediate dataset name, even when a target layer was supplied
    dte = datetime.datetime.now().strftime("%Y%m%d%H%M")
    targetName = dla.getTargetName(xmlDoc) + dte
    targetFC = os.path.join(dla.workspace, targetName)
    res = dlaExtractLayerToGDB.extract(xmlFileName, rowLimit, dla.workspace,
                                       sourceLayer, targetFC)
    if res == True:
        res = dlaFieldCalculator.calculate(xmlFileName, dla.workspace,
                                           targetName, False)
        if res == True:
            arcpy.env.addOutputsToMap = True
            layer = targetName
            layertmp = targetName + "tmp"
            if arcpy.Exists(layertmp):
                arcpy.Delete_management(layertmp)
            arcpy.MakeFeatureLayer_management(targetFC, layertmp)
            fieldInfo = dla.getLayerVisibility(layertmp, xmlFileName)
            arcpy.MakeFeatureLayer_management(targetFC, layer, None,
                                              dla.workspace, fieldInfo)
            # should make only the target fields visible
            arcpy.SetParameter(_success, layer)
    else:
        dla.addError("Failed to Extract data")
        print("Failed to Extract data")
    dla.writeFinalMessage("Data Assistant - Preview")
def calculate(xmlFileName, workspace, name, ignore):

    dla.workspace = workspace
    success = True
    arcpy.ClearWorkspaceCache_management(dla.workspace)
    xmlDoc = dla.getXmlDoc(xmlFileName)

    arcpy.env.workspace = dla.workspace
    table = dla.getTempTable(name)

    if not arcpy.Exists(table):
        dla.addError("Feature Class " + table + " does not exist, exiting")
        arcpy.SetParameter(SUCCESS, False)
        return
    if not arcpy.TestSchemaLock(table):
        dla.addError("Unable to obtain a schema lock for " + table +
                     ", exiting")
        arcpy.SetParameter(SUCCESS, False)
        return -1

    desc = arcpy.Describe(table)
    fields = dla.getXmlElements(xmlFileName, "Field")
    sourceFields = dla.getXmlElements(xmlFileName, "SourceField")
    targetFields = dla.getXmlElements(xmlFileName, "TargetField")
    attrs = [f.name for f in arcpy.ListFields(table)]

    for field in fields:
        arcpy.env.workspace = dla.workspace
        targetName = dla.getNodeValue(field, "TargetName")
        sourceName = dla.getNodeValue(field, "SourceName")

        type = "String"
        length = "50"
        for target in targetFields:
            nm = target.getAttributeNode("Name").nodeValue
            if nm == targetName:
                type = target.getAttributeNode("Type").nodeValue
                length = target.getAttributeNode("Length").nodeValue
        # uppercase compare, later need to check for orig/upper name for calc
        #ups = [nm.upper() for nm in attrs]
        dla.addDlaField(table, targetName, field, attrs, type, length)

    allFields = sourceFields + targetFields
    names = []
    types = []
    lengths = []
    for field in allFields:
        nm = field.getAttributeNode("Name").nodeValue
        if nm != dla.noneName:
            names.append(nm)
            typ = field.getAttributeNode("Type").nodeValue
            leng = field.getAttributeNode("Length").nodeValue
            types.append(typ)
            lengths.append(leng)

    retVal = setFieldValues(table, fields, names, types, lengths)
    if retVal == False:
        success = False
    arcpy.ClearWorkspaceCache_management(dla.workspace)
    dla.cleanupGarbage()

    arcpy.ResetProgressor()
    if ignore == True:
        success = True
    return success
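calculate() works on the intermediate table that extract() writes into dla.workspace (dla.getTempTable(name) resolves it by name). A minimal sketch chaining the two steps the way preview() does; the config path and staging name are hypothetical, and ignore=False means calculation failures are reported rather than masked.

import os
import dla
import dlaExtractLayerToGDB
import dlaFieldCalculator

xmlFileName = r"C:\configs\ParcelsLoad.xml"   # hypothetical config file
dla.setWorkspace()                            # point dla.workspace at the staging geodatabase
targetName = "Parcels_preview"                # hypothetical name for the intermediate copy
targetFC = os.path.join(dla.workspace, targetName)

if dlaExtractLayerToGDB.extract(xmlFileName, 100, dla.workspace, "#", targetFC):
    ok = dlaFieldCalculator.calculate(xmlFileName, dla.workspace, targetName, False)
    print("Field calculation", "succeeded" if ok else "failed")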
def publish(xmlFileNames):
    # function called from main or from another script, performs the data update processing
    global sourceLayer,targetLayer,_success
    dla._errorCount = 0

    arcpy.SetProgressor("default","Data Assistant")
    arcpy.SetProgressorLabel("Data Assistant")
    xmlFiles = xmlFileNames.split(";")
    for xmlFile in xmlFiles: # multi value parameter, loop for each file
        dla.addMessage("Configuration file: " + xmlFile)
        xmlDoc = dla.getXmlDoc(xmlFile) # parse the xml document
        if xmlDoc == None:
            return
        svceS = False
        svceT = False
        if sourceLayer == "" or sourceLayer == None:
            sourceLayer = dla.getNodeValue(xmlDoc,"Source")
            svceS = dla.checkLayerIsService(sourceLayer)
        if targetLayer == "" or targetLayer == None:
            targetLayer = dla.getNodeValue(xmlDoc,"Target")
            svceT = dla.checkLayerIsService(targetLayer)

        dla.addMessage(targetLayer)
        ## Added May2016. warn user if capabilities are not correct, exit if not a valid layer
        if not dla.checkServiceCapabilities(sourceLayer,True):
            return False
        if not dla.checkServiceCapabilities(targetLayer,True):
            return False

        if svceS == True or svceT == True:
            token = dla.getSigninToken() # when signed in get the token and use this. Will be requested many times during the publish
            if token == None:
                dla.addError("User must be signed in for this tool to work with services")
                return

        expr = getWhereClause(xmlDoc)
        if useReplaceSettings == True and (expr == '' or expr == None):
            dla.addError("There must be an expression for replacing by field value, current value = " + str(expr))
            return False

        dla.setWorkspace()
        targetName = dla.getTargetName(xmlDoc)
        res = dlaExtractLayerToGDB.extract(xmlFile,None,dla.workspace,sourceLayer,targetName)
        if res != True:
            table = dla.getTempTable(targetName)
            msg = "Unable to export data, there is a lock on existing datasets or another unknown error"
            if arcpy.TestSchemaLock(table) != True:
                msg = "Unable to export data, there is a lock on the intermediate feature class: " + table
            dla.addError(msg)
            print(msg)
            return
        else:
            res = dlaFieldCalculator.calculate(xmlFile,dla.workspace,targetName,False)
            if res == True:
                dlaTable = dla.getTempTable(targetName)
                res = doPublish(xmlDoc,dlaTable,targetLayer)

        arcpy.ResetProgressor()
        sourceLayer = None # set source and target back to None for multiple file processing
        targetLayer = None
        if res == False:
            err = "Data Assistant Update Failed, see messages for details"
            dla.addError(err)
            print(err)
def calculate(xmlFileName,workspace,name,ignore):

    dla.workspace = workspace    
    success = True
    arcpy.ClearWorkspaceCache_management(dla.workspace)
    xmlDoc = dla.getXmlDoc(xmlFileName)
    
    arcpy.env.workspace = dla.workspace
    table = dla.getTempTable(name)

    if not arcpy.Exists(table):
        dla.addError("Feature Class " + table + " does not exist, exiting")
        arcpy.SetParameter(SUCCESS, False)
        return
    if not arcpy.TestSchemaLock(table):
        dla.addError("Unable to obtain a schema lock for " + table + ", exiting")
        arcpy.SetParameter(SUCCESS, False)
        return -1
    
    desc = arcpy.Describe(table)
    fields = dla.getXmlElements(xmlFileName,"Field")
    sourceFields = dla.getXmlElements(xmlFileName,"SourceField")
    targetFields = dla.getXmlElements(xmlFileName,"TargetField")
    attrs = [f.name for f in arcpy.ListFields(table)]

    for field in fields:
        arcpy.env.workspace = dla.workspace
        targetName = dla.getNodeValue(field,"TargetName")
        sourceName = dla.getNodeValue(field,"SourceName")
            
        type = "String"
        length = "50"
        for target in targetFields:
            nm = target.getAttributeNode("Name").nodeValue
            if  nm == targetName:
                type = target.getAttributeNode("Type").nodeValue
                length = target.getAttributeNode("Length").nodeValue
        # uppercase compare, later need to check for orig/upper name for calc
        #ups = [nm.upper() for nm in attrs]
        dla.addDlaField(table,targetName,field,attrs,type,length)

    allFields = sourceFields + targetFields
    names = []
    types = []
    lengths = []
    for field in allFields:
        nm = field.getAttributeNode("Name").nodeValue
        if nm != dla.noneName:
            names.append(nm)
            typ = field.getAttributeNode("Type").nodeValue
            leng = field.getAttributeNode("Length").nodeValue      
            types.append(typ)
            lengths.append(leng)

    retVal = setFieldValues(table,fields,names,types,lengths)
    if retVal == False:
        success = False
    arcpy.ClearWorkspaceCache_management(dla.workspace)
    dla.cleanupGarbage()

    arcpy.ResetProgressor()
    if ignore == True:
        success = True
    return success
Example #15
def publish(xmlFileNames):
    # function called from main or from another script, performs the data update processing
    global _useReplaceSettings
    dla._errCount = 0

    arcpy.SetProgressor("default","Data Assistant")
    arcpy.SetProgressorLabel("Data Assistant")
    xmlFiles = xmlFileNames.split(";")
    layers = []

    for xmlFile in xmlFiles: # multi value parameter, loop for each file
        xmlFile = dla.getXmlDocName(xmlFile)
        dla.addMessage("Configuration file: " + xmlFile)
        xmlDoc = dla.getXmlDoc(xmlFile) # parse the xml document
        if xmlDoc == None:
            return
        prj = dla.setProject(xmlFile,dla.getNodeValue(xmlDoc,"Project"))
        if prj == None:
            dla.addError("Unable to open your project, please ensure it is in the same folder as your current project or your Config file")
            return False

        source = dla.getDatasetPath(xmlDoc,"Source")
        target = dla.getDatasetPath(xmlDoc,"Target")
        targetName = dla.getDatasetName(target)
        dla.addMessage(source)
        dla.addMessage(target)

        if dlaService.checkLayerIsService(source) or dlaService.checkLayerIsService(target):
            token = dlaService.getSigninToken() # when signed in get the token and use this. Will be requested many times during the publish
            # exit here before doing other things if not signed in
            if token == None:
                dla.addError("User must be signed in for this tool to work with services")
                return False

        expr = getWhereClause(xmlDoc)
        if _useReplaceSettings == True and (expr == '' or expr == None):
            dla.addError("There must be an expression for replacing by field value, current value = " + str(expr))
            return False

        errs = False
        if dlaService.validateSourceUrl(source) == False:
            dla.addError("Source path does not appear to be a valid feature layer")
            errs = True

        if _useReplaceSettings == True:
            if dlaService.validateTargetReplace(target) == False:
                dla.addError("Target path does not have correct privileges")
                errs = True
        elif _useReplaceSettings == False:
            if dlaService.validateTargetAppend(target) == False:
                dla.addError("Target path does not have correct privileges")
                errs = True

        if errs:
            return False


        dla.setWorkspace()

        if dla.isTable(source) or dla.isTable(target):
            datasetType = 'Table'
        else:
            datasetType = 'FeatureClass'

        if not dla.isStaged(xmlDoc):
            res = dlaExtractLayerToGDB.extract(xmlFile,None,dla.workspace,source,target,datasetType)
            if res != True:
                table = dla.getTempTable(targetName)
                msg = "Unable to export data, there is a lock on existing datasets or another unknown error"
                if arcpy.TestSchemaLock(table) != True and arcpy.Exists(table) == True:
                    msg = "Unable to export data, there is a lock on the intermediate feature class: " + table
                dla.addError(msg)
                print(msg)
                return
            else:
                res = dlaFieldCalculator.calculate(xmlFile,dla.workspace,targetName,False)
                if res == True:
                    dlaTable = dla.getTempTable(targetName)
                    res = doPublish(xmlDoc,dlaTable,target,_useReplaceSettings)
        else:
            dla.addMessage('Data previously staged, will proceed using intermediate dataset')
            dlaTable = dla.workspace + os.sep + dla.getStagingName(source,target)
            res = doPublish(xmlDoc,dlaTable,target,_useReplaceSettings)
            if res == True:
                dla.removeStagingElement(xmlDoc)
                xmlDoc.writexml(open(xmlFile, 'wt', encoding='utf-8'))
                dla.addMessage('Staging element removed from config file')

        arcpy.ResetProgressor()
        if res == False:
            err = "Data Assistant Update Failed, see messages for details"
            dla.addError(err)
            print(err)
        else:
            layers.append(target)

    arcpy.SetParameter(_outParam,';'.join(layers))
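publish() accepts several configuration files in one call: xmlFileNames is split on ";" and each file is processed in turn, and the successfully updated targets are joined back into the derived output parameter. A minimal sketch with hypothetical paths; when either dataset is a service, the user must already be signed in, as the token check above enforces.

configs = [r"C:\configs\ParcelsLoad.xml",
           r"C:\configs\AddressLoad.xml"]   # hypothetical config files
publish(";".join(configs))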
Example #16
def calculate(xmlFileName, workspace, name, ignore):

    dla.workspace = workspace
    success = True
    arcpy.ClearWorkspaceCache_management(dla.workspace)
    xmlDoc = dla.getXmlDoc(xmlFileName)
    dla.addMessage("Field Calculator: " + xmlFileName)
    arcpy.env.workspace = dla.workspace
    table = dla.getTempTable(name)

    if not arcpy.Exists(table):
        dla.addError("Feature Class " + table + " does not exist, exiting")
        arcpy.SetParameter(SUCCESS, False)
        return
    if not arcpy.TestSchemaLock(table):
        dla.addError("Unable to obtain a schema lock for " + table +
                     ", exiting")
        arcpy.SetParameter(SUCCESS, False)
        return -1

    desc = arcpy.Describe(table)
    fields = dla.getXmlElements(xmlFileName, "Field")
    sourceFields = dla.getXmlElements(xmlFileName, "SourceField")
    targetFields = dla.getXmlElements(xmlFileName, "TargetField")
    attrs = [f.name for f in arcpy.ListFields(table)]
    target_values = CaseInsensitiveDict()

    #Fix read into dict, using NM as key
    # at this point just getting the list of all target field names/types/lengths
    for target in targetFields:
        nm = target.getAttributeNode("Name").nodeValue
        target_values[nm] = dict(
            ftype=target.getAttributeNode("Type").nodeValue,
            flength=target.getAttributeNode("Length").nodeValue)

    for field in fields:
        arcpy.env.workspace = dla.workspace
        targetName = dla.getNodeValue(field, "TargetName")
        sourceName = dla.getNodeValue(field, "SourceName")

        ftype = "String"
        flength = "50"
        if targetName in target_values:
            ftype = target_values[targetName]['ftype']
            flength = target_values[targetName]['flength']

        # make sure the field exists in the field calculator dataset, this will include all source and target fields.
        retcode = dla.addDlaField(table, targetName, field, attrs, ftype,
                                  flength)
        if retcode == False:
            addError("Unable to add field " + targetName +
                     " to database to calculate values, exiting")

    allFields = sourceFields + targetFields  # this should be the same as the dataset fields at this point
    desc = arcpy.Describe(table)
    layerNames = []
    names = []
    ftypes = []
    lengths = []
    # gdb system fields that are handled automatically and cannot be calculated;
    # use a separate name so the 'ignore' parameter is not overwritten
    ignoreNames = [nm.upper() for nm in dla.getIgnoreFieldNames(desc)]

    for field in desc.fields:  # get the uppercase names for everything that exists in the dataset
        if field.name.upper() not in ignoreNames:
            layerNames.append(field.name.upper())

    for field in allFields:  # loop through everything that might exist
        nm = field.getAttributeNode("Name").nodeValue.replace('.', '_')  # handle joins and remaining . in field names
        if nm != dla._noneFieldName and nm.upper() not in ignoreNames and nm.upper() in layerNames:
            # skip the None field, gdb system fields, and names not present in the dataset
            idx = dla.getFieldIndexList(names, nm)
            if idx is None:  # if the name is not already in the list
                names.append(nm)
                typ = field.getAttributeNode("Type").nodeValue
                leng = field.getAttributeNode("Length").nodeValue
                ftypes.append(typ)
                lengths.append(leng)

            #FIXME: Steve, was not sure why you were capturing an error here, and then doing something
            # from Steve - was looking for names that actually exist in the dataset and are not gdb
            # system fields. No guarantee Xml matches dataset
            #try:
            #    names.index(nm)
            #except:
            #    names.append(nm)
            #    typ = field.getAttributeNode("Type").nodeValue
            #    leng = field.getAttributeNode("Length").nodeValue
            #    ftypes.append(typ)
            #    lengths.append(leng)
    retVal = setFieldValues(table, fields, names, ftypes, lengths)
    if retVal == False:
        success = False
    arcpy.ClearWorkspaceCache_management(dla.workspace)
    dla.cleanupGarbage()

    arcpy.ResetProgressor()
    if ignore == True:
        success = True
    return success
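CaseInsensitiveDict is used above but not defined in this snippet; it may come from a helper module (requests.structures ships one, for example). A minimal stand-in that covers the operations this function needs -- assignment, membership tests, and lookup by field name regardless of case:

class CaseInsensitiveDict(dict):
    """Dict that folds string keys to lower case so 'ParcelID' and 'PARCELID' match."""
    def __setitem__(self, key, value):
        super().__setitem__(key.lower(), value)
    def __getitem__(self, key):
        return super().__getitem__(key.lower())
    def __contains__(self, key):
        return super().__contains__(key.lower())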
def calculate(xmlFileName,workspace,name,ignore):

    dla.workspace = workspace    
    success = True
    arcpy.ClearWorkspaceCache_management(dla.workspace)
    xmlDoc = dla.getXmlDoc(xmlFileName)
    dla.addMessage("Field Calculator: " + xmlFileName)
    arcpy.env.workspace = dla.workspace
    table = dla.getTempTable(name)

    if not arcpy.Exists(table):
        dla.addError("Feature Class " + table + " does not exist, exiting")
        arcpy.SetParameter(SUCCESS, False)
        return
    if not arcpy.TestSchemaLock(table):
        dla.addError("Unable to obtain a schema lock for " + table + ", exiting")
        arcpy.SetParameter(SUCCESS, False)
        return -1
    
    desc = arcpy.Describe(table)
    fields = dla.getXmlElements(xmlFileName,"Field")
    sourceFields = dla.getXmlElements(xmlFileName,"SourceField")
    targetFields = dla.getXmlElements(xmlFileName,"TargetField")
    attrs = [f.name for f in arcpy.ListFields(table)]

    for field in fields:
        arcpy.env.workspace = dla.workspace
        targetName = dla.getNodeValue(field,"TargetName")
        sourceName = dla.getNodeValue(field,"SourceName")
            
        ftype = "String"
        length = "50"
        for target in targetFields:
            nm = target.getAttributeNode("Name").nodeValue
            if  nm == targetName:
                ftype = target.getAttributeNode("Type").nodeValue
                length = target.getAttributeNode("Length").nodeValue
        # uppercase compare, later need to check for orig/upper name for calc
        #ups = [nm.upper() for nm in attrs]
        dla.addDlaField(table,targetName,field,attrs,ftype,length)

    allFields = sourceFields + targetFields
    desc = arcpy.Describe(table)
    layerNames = []
    names = []
    ftypes = []
    lengths = []
    # gdb system fields that are handled automatically and cannot be calculated;
    # use a separate name so the 'ignore' parameter is not overwritten
    ignoreNames = ['FID','OBJECTID','GLOBALID','SHAPE','SHAPE_AREA','SHAPE_LENGTH','SHAPE_LEN','STLENGTH()','STAREA()','raster']
    for propName in ['OIDFieldName','ShapeFieldName','LengthFieldName','AreaFieldName','GlobalIDFieldName','RasterFieldName']:
        try:
            val = getattr(desc, propName)
            val = val[val.rfind('.')+1:]
            ignoreNames.append(val.upper())
        except:
            pass

    for field in desc.fields:
        if field.name.upper() not in ignoreNames:
            layerNames.append(field.name.upper())

    for field in allFields:
        nm = field.getAttributeNode("Name").nodeValue
        if nm != dla.noneName and nm.upper() not in ignoreNames and nm.upper() in layerNames:
            try:
                names.index(nm)
            except:
                names.append(nm)
                typ = field.getAttributeNode("Type").nodeValue
                leng = field.getAttributeNode("Length").nodeValue      
                ftypes.append(typ)
                lengths.append(leng)

    retVal = setFieldValues(table,fields,names,ftypes,lengths)
    if retVal == False:
        success = False
    arcpy.ClearWorkspaceCache_management(dla.workspace)
    dla.cleanupGarbage()

    arcpy.ResetProgressor()
    if ignore == True:
        success = True
    return success
def publish(xmlFileNames):
    # function called from main or from another script, performs the data update processing
    global sourceLayer, targetLayer, _success
    dla._errorCount = 0

    arcpy.SetProgressor("default", "Data Assistant")
    arcpy.SetProgressorLabel("Data Assistant")
    xmlFiles = xmlFileNames.split(";")
    for xmlFile in xmlFiles:  # multi value parameter, loop for each file
        dla.addMessage("Configuration file: " + xmlFile)
        xmlDoc = dla.getXmlDoc(xmlFile)  # parse the xml document
        if xmlDoc == None:
            return
        svceS = False
        svceT = False
        if sourceLayer == "" or sourceLayer == None:
            sourceLayer = dla.getNodeValue(xmlDoc, "Source")
            svceS = dla.checkLayerIsService(sourceLayer)
        if targetLayer == "" or targetLayer == None:
            targetLayer = dla.getNodeValue(xmlDoc, "Target")
            svceT = dla.checkLayerIsService(targetLayer)

        dla.addMessage(targetLayer)
        ## Added May2016. warn user if capabilities are not correct, exit if not a valid layer
        if not dla.checkServiceCapabilities(sourceLayer, True):
            return False
        if not dla.checkServiceCapabilities(targetLayer, True):
            return False

        if svceS == True or svceT == True:
            token = dla.getSigninToken(
            )  # when signed in get the token and use this. Will be requested many times during the publish
            if token == None:
                dla.addError(
                    "User must be signed in for this tool to work with services"
                )
                return

        expr = getWhereClause(xmlDoc)
        if useReplaceSettings == True and (expr == '' or expr == None):
            dla.addError(
                "There must be an expression for replacing by field value, current value = "
                + str(expr))
            return False

        dla.setWorkspace()
        targetName = dla.getTargetName(xmlDoc)
        res = dlaExtractLayerToGDB.extract(xmlFile, None, dla.workspace,
                                           sourceLayer, targetName)
        if res != True:
            table = dla.getTempTable(targetName)
            msg = "Unable to export data, there is a lock on existing datasets or another unknown error"
            if arcpy.TestSchemaLock(table) != True:
                msg = "Unable to export data, there is a lock on the intermediate feature class: " + table
            dla.addError(msg)
            print(msg)
            return
        else:
            res = dlaFieldCalculator.calculate(xmlFile, dla.workspace,
                                               targetName, False)
            if res == True:
                dlaTable = dla.getTempTable(targetName)
                res = doPublish(xmlDoc, dlaTable, targetLayer)

        arcpy.ResetProgressor()
        sourceLayer = None  # set source and target back to None for multiple file processing
        targetLayer = None
        if res == False:
            err = "Data Assistant Update Failed, see messages for details"
            dla.addError(err)
            print(err)
Example #19
def stage(xmlFileNames):
    global source, target, rowLimit

    dla.setWorkspace()
    dla._errCount = 0
    outlayers = []

    for xmlFileName in xmlFileNames.split(';'):
        xmlFileName = dla.getXmlDocName(xmlFileName)
        xmlDoc = dla.getXmlDoc(xmlFileName)
        prj = dla.setProject(xmlFileName, dla.getNodeValue(xmlDoc, "Project"))
        if prj == None:
            dla.addError(
                "Unable to open your project, please ensure it is in the same folder as your current project or your Config file"
            )

        if rowLimit == "" or rowLimit == None:
            rowLimit = None
        if source == "" or source == None:
            source = dla.getDatasetPath(xmlDoc, "Source")
        if target == "" or target == None:
            target = dla.getDatasetPath(xmlDoc, "Target")

        if dla.isTable(source) or dla.isTable(target):
            datasetType = 'Table'
        else:
            datasetType = 'FeatureClass'

        targetName = dla.getStagingName(source, target)
        targetDS = os.path.join(dla.workspace, targetName)

        res = dlaExtractLayerToGDB.extract(xmlFileName, rowLimit,
                                           dla.workspace, source, targetDS,
                                           datasetType)
        if res == True:
            res = dlaFieldCalculator.calculate(xmlFileName, dla.workspace,
                                               targetName, False)

            if res == True:
                arcpy.env.addOutputsToMap = True
                layer = targetName
                layertmp = targetName + "tmp"
                if arcpy.Exists(layertmp):
                    arcpy.Delete_management(layertmp)
                if dla.isTable(targetDS):
                    arcpy.MakeTableView_management(targetDS, layertmp)
                else:
                    arcpy.MakeFeatureLayer_management(targetDS, layertmp)
                fieldInfo = dla.getLayerVisibility(layertmp, xmlFileName)
                if dla.isTable(targetDS):
                    arcpy.MakeTableView_management(targetDS, layer, None,
                                                   dla.workspace, fieldInfo)
                else:
                    arcpy.MakeFeatureLayer_management(targetDS, layer, None,
                                                      dla.workspace, fieldInfo)
                # should make only the target fields visible
                outlayers.append(layer)
                ### *** need to insert tag in xml file...
                dla.insertStagingElement(xmlDoc)
                try:
                    xmlDoc.writexml(open(xmlFileName, 'wt', encoding='utf-8'))
                    dla.addMessage('Staging element written to config file')
                except:
                    dla.addMessage("Unable to write data to xml file")
                xmlDoc.unlink()
        else:
            dla.addError("Failed to Extract data")
            print("Failed to Extract data")
    if outlayers != []:
        arcpy.SetParameter(_derived, ";".join(outlayers))
    dla.writeFinalMessage("Data Assistant - Stage")