Example #1
def checkValueMaps(dataset,table,field,fieldName,mapName):
    global valueMaps
    method = gzSupport.getNodeValue(field,"Method")
    success = True
    if method == "ValueMap":
        fieldMapName = gzSupport.getNodeValue(field,"ValueMapName")
        otherwise = gzSupport.getNodeValue(field,"ValueMapOtherwise")
        found = False
        for valueMap in valueMaps:  # renamed from 'map' to avoid shadowing the built-in
            mapNodeName = valueMap.getAttributeNode("name").nodeValue
            if mapNodeName == fieldMapName and not found:
                found = True # the same value map can appear in multiple gizinta project files; only the first one is used
                mapValues = gzSupport.getNodeValue(valueMap,mapName).split(",")
                if otherwise not in (None, '') and otherwise not in mapValues and otherwise.count(" ") <= 2:
                    mapValues.append(otherwise)
                values = gzSupport.getFieldValues("Unique",[fieldName],[dataset])
                uniqueValues = values[0] # first element of the result holds the distinct field values
                #delta = len(uniqueValues[0]) - len(mapValues)
                mismatch = []
                for uVal in uniqueValues:
                    if uVal not in mapValues:
                        mismatch.append(uVal)
                if len(mismatch) > 0 and (otherwise is None or otherwise.count(" ") <= 2): # guard against a missing 'otherwise' value
                    gzSupport.addError(str(len(mismatch)) + " mismatches for " + fieldName + ", results located in " + gzSupport.errorTableName)
                    for uVal in mismatch:
                        gzSupport.addError("'" + str(uVal) + "' not found in value map " + str(fieldMapName))
                        gzSupport.logProcessError(table,gzSupport.sourceIDField,"",fieldName,"Mismatched Value Map:" + str(uVal))
                    success = False
                elif len(mismatch) == 0:
                    gzSupport.addMessage("No mismatches found for ValueMaps")
    return success
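
A minimal calling sketch for checkValueMaps, assuming the gizinta gzSupport module is importable and that the project file exposes <Field> and <ValueMap> elements carrying a "name" attribute; the element names, file name, dataset/table paths, and the "SourceFieldValues" map-node name are illustrative assumptions, not something the examples above document:

# Hypothetical driver; element names, paths and the map-node name are assumptions.
from xml.dom import minidom

doc = minidom.parse("gizintaProject.xml")         # placeholder project file
valueMaps = doc.getElementsByTagName("ValueMap")  # module-level global read by checkValueMaps
dataset = r"C:\data\source.gdb\Parcels"           # placeholder source dataset
table = "Parcels"                                 # placeholder target table name

allOk = True
for field in doc.getElementsByTagName("Field"):
    fieldName = field.getAttributeNode("name").nodeValue
    allOk = checkValueMaps(dataset, table, field, fieldName, "SourceFieldValues") and allOk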
Example #2
def checkValueMaps(dataset,table,field,fieldName,mapName):
    global valueMaps
    method = gzSupport.getNodeValue(field,"Method")
    success = True
    if method == "ValueMap":
        fieldMapName = gzSupport.getNodeValue(field,"ValueMapName")
        otherwise = gzSupport.getNodeValue(field,"ValueMapOtherwise")
        for valueMap in valueMaps:  # renamed from 'map' to avoid shadowing the built-in
            mapNodeName = valueMap.getAttributeNode("name").nodeValue
            if mapNodeName == fieldMapName:
                mapValues = gzSupport.getNodeValue(valueMap,mapName).split(",")
                if otherwise not in (None, '') and otherwise not in mapValues and otherwise.count(" ") <= 2:
                    mapValues.append(otherwise)
                values = gzSupport.getFieldValues("Unique",[fieldName],[dataset])
                uniqueValues = values[0]
                #delta = len(uniqueValues[0]) - len(mapValues)
                mismatch = []
                for uVal in uniqueValues:
                    if uVal not in mapValues:
                        mismatch.append(uVal)
                if len(mismatch) > 0 and (otherwise is None or otherwise.count(" ") <= 2): # guard against a missing 'otherwise' value
                    gzSupport.addError(str(len(mismatch)) + " mismatches for " + fieldName + ", results located in " + gzSupport.errorTableName)
                    for uVal in mismatch:
                        gzSupport.addError("'" + str(uVal) + "' not found in value map " + str(fieldMapName))
                        gzSupport.logProcessError(table,gzSupport.sourceIDField,"",fieldName,"Mismatched Value Map:" + str(uVal))
                    success = False
                elif len(mismatch) == 0:
                    gzSupport.addMessage("No mismatches found for ValueMaps")
    return success
Example #3
def findDuplicates(dataset,table,field):
    success = True
    uValues = gzSupport.getFieldValues("Unique",[field],[dataset])
    uniqueValues = uValues[0] # distinct values found for the field
    diffValues = uValues[1]   # values that occur more than once
    fieldValues = gzSupport.getFieldValues("All",[field],[dataset])[0] # every value, duplicates included
    delta = len(diffValues)
    if delta > 0:
        gzSupport.addMessage(str(len(fieldValues)) + " All : " + str(len(uniqueValues)) + " Unique")
        gzSupport.addError(str(delta) + " Duplicates found, results located in " + gzSupport.errorTableName)
        for x in diffValues:
            gzSupport.logProcessError(table,field,str(x),field,"Duplicate Value:" + str(x))
        success = False
    elif delta == 0:
        gzSupport.addMessage("No Duplicates found")

    return success
Example #4
def findDuplicates(dataset,table,field):
    success = True
    uValues = gzSupport.getFieldValues("Unique",[field],[dataset])
    uniqueValues = uValues[0]
    diffValues = uValues[1]
    fieldValues = gzSupport.getFieldValues("All",[field],[dataset])[0] # not used below; this variant gets the row count from GetCount instead
    delta = len(diffValues)
    if delta > 0:
        count = int(arcpy.GetCount_management(dataset).getOutput(0))
        gzSupport.addMessage(str(count) + " rows : " + str(len(uniqueValues)) + " Unique")
        gzSupport.addError(str(delta) + " Duplicates found, results located in " + gzSupport.errorTableName)
        for x in diffValues:
            gzSupport.logProcessError(table,field,str(x),field,"Duplicate Value:" + str(x))
        success = False
    elif delta == 0:
        gzSupport.addMessage("No Duplicates found")

    return success
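
A minimal sketch of how either findDuplicates variant above might be called, assuming gzSupport has already been configured (workspace, error table, log) by the surrounding gizinta tooling; the geodatabase path and key field are placeholders:

# Hypothetical caller; the path and field name are assumptions for illustration.
import arcpy

sourceDataset = r"C:\data\source.gdb\Parcels"   # placeholder feature class
keyField = "PARCEL_ID"                          # placeholder unique-key field
if not findDuplicates(sourceDataset, "Parcels", keyField):
    arcpy.AddError(keyField + " has duplicate values; see the gzSupport error table")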
Example #5
def main(argv=None):
    global targetWorkspace
    hasVersion = False
    desc = arcpy.Describe(gzSupport.workspace)
    if desc.workspaceType != "RemoteDatabase" and versionName == None:
        targetWorkspace = defaultWorkspace
    success = True
    arcpy.ResetProgressor()
    arcpy.env.Workspace = gzSupport.workspace
    uniqueValues = gzSupport.getFieldValues("Unique", fieldNames, datasets)[0]
    sources = gzSupport.listDatasets(gzSupport.workspace)
    sNames = sources[0]
    sFullNames = sources[1]
    arcpy.SetProgressor("Step", "Load by " + str(fieldNames) + "...", 0, len(uniqueValues) * len(datasets), 1)
    for value in uniqueValues:
        try:
            hasVersion = False
            gzSupport.addMessage(value)
            if desc.workspaceType == "RemoteDatabase" and versionName != None:
                arcpy.SetProgressorLabel("Creating Version " + versionName)
                hasVersion = gzSupport.createVersion(defaultWorkspace, defaultVersionName, versionName)
            if hasVersion == True or versionName == None or desc.workspaceType == "LocalDatabase":
                arcpy.env.Workspace = targetWorkspace
                targets = gzSupport.listDatasets(targetWorkspace)
                tNames = targets[0]
                tFullNames = targets[1]
                for dataset in datasets:
                    name = dataset.getAttributeNode("name").nodeValue
                    arcpy.SetProgressorLabel("Loading Dataset " + name)
                    targetTable = gzSupport.getFullName(name, tNames, tFullNames)
                    sourceTable = gzSupport.getFullName(name, sNames, sFullNames)
                    attrs = [f.name for f in arcpy.ListFields(targetTable)]
                    expr = getExpression(attrs, fieldNames, value)
                    arcpy.SetProgressorLabel("Loading Dataset " + name + " Where " + expr)
                    tName = targetTable[targetTable.rfind("\\") + 1 :]
                    tLocation = targetTable[0 : targetTable.rfind("\\")]
                    if gzSupport.deleteRows(tLocation, tName, expr) == True:
                        retVal = gzSupport.appendRows(sourceTable, targetTable, expr)
                        if retVal == False:
                            success = False
                    else:
                        success = False
                    arcpy.SetProgressorPosition()
                if success == True:
                    if desc.workspaceType == "RemoteDatabase":
                        arcpy.SetProgressorLabel("Reconcile and Post")
                        retVal = gzSupport.reconcilePost(defaultWorkspace, versionName, defaultVersionName)
                        if retVal == False:
                            success = False
                            gzSupport.deleteVersion(defaultWorkspace, versionName)
                    elif desc.workspaceType == "LocalDatabase":
                        arcpy.SetProgressorLabel("Completed Update for " + str(value))
                    gzSupport.logDatasetProcess(targetTable, sys.argv[0], retVal)
                else:
                    gzSupport.logDatasetProcess(targetTable, sys.argv[0], retVal)
                gzSupport.cleanupGarbage()

        except:
            gzSupport.showTraceback()
            success = False
            gzSupport.logDatasetProcess("Serious error", sys.argv[0], False)
        finally:
            arcpy.SetProgressorPosition()
            arcpy.ClearWorkspaceCache_management(defaultWorkspace)
    if success == False:
        gzSupport.addError("Errors occurred during process, look in log files for more information")
    if gzSupport.ignoreErrors == True:
        success = True
    if desc.workspaceType == "RemoteDatabase" and success == True:
        analyze(defaultWorkspace, datasets, tNames, tFullNames)
    arcpy.SetParameter(SUCCESS, success)

    arcpy.ClearWorkspaceCache_management(defaultWorkspace)
    gzSupport.compressGDB(gzSupport.workspace)
    gzSupport.compressGDB(defaultWorkspace)
    gzSupport.closeLog()
    return
Example #6
def main(argv = None):
    global targetWorkspace
    hasVersion = False
    desc = arcpy.Describe(gzSupport.workspace)
    if desc.workspaceType != "RemoteDatabase" and versionName == None:
        targetWorkspace = defaultWorkspace
    success = True
    arcpy.ResetProgressor()
    arcpy.env.Workspace = gzSupport.workspace
    uniqueValues = gzSupport.getFieldValues("Unique",fieldNames,datasets)[0]
    sources = gzSupport.listDatasets(gzSupport.workspace)
    sNames = sources[0]
    sFullNames = sources[1]
    arcpy.SetProgressor("Step","Load by " + str(fieldNames) + "...",0,len(uniqueValues)*len(datasets),1)
    for value in uniqueValues:
        try:
            hasVersion = False
            gzSupport.addMessage(value)
            if desc.workspaceType == "RemoteDatabase" and versionName != None:
                arcpy.SetProgressorLabel("Creating Version " + versionName)
                hasVersion = gzSupport.createVersion(defaultWorkspace,defaultVersionName,versionName)
            if hasVersion == True  or versionName == None or desc.workspaceType == "LocalDatabase":
                arcpy.env.Workspace = targetWorkspace
                targets = gzSupport.listDatasets(targetWorkspace)
                tNames = targets[0]
                tFullNames = targets[1]
                for dataset in datasets:
                    name = dataset.getAttributeNode("name").nodeValue
                    arcpy.SetProgressorLabel("Loading Dataset " + name)
                    targetTable = gzSupport.getFullName(name,tNames,tFullNames)
                    sourceTable = gzSupport.getFullName(name,sNames,sFullNames)
                    attrs = [f.name for f in arcpy.ListFields(targetTable)]
                    expr = getExpression(attrs,fieldNames,value)
                    arcpy.SetProgressorLabel("Loading Dataset " + name + " Where " + expr)
                    tName = targetTable[targetTable.rfind("\\")+1:]
                    tLocation = targetTable[0:targetTable.rfind("\\")]
                    if gzSupport.deleteRows(tLocation,tName,expr) == True:
                        retVal = gzSupport.appendRows(sourceTable,targetTable,expr)
                        if retVal == False:
                            success = False
                    else:
                        success = False
                    arcpy.SetProgressorPosition()
                if success == True:
                    if desc.workspaceType == "RemoteDatabase":
                        arcpy.SetProgressorLabel("Reconcile and Post")
                        retVal = gzSupport.reconcilePost(defaultWorkspace,versionName,defaultVersionName)
                        if retVal == False:
                            success = False
                            gzSupport.deleteVersion(defaultWorkspace,versionName)
                    elif desc.workspaceType == "LocalDatabase":
                        arcpy.SetProgressorLabel("Completed Update for " + str(value))
                    gzSupport.logDatasetProcess(targetTable,sys.argv[0],retVal)
                else:
                    gzSupport.logDatasetProcess(targetTable,sys.argv[0],retVal)

        except:
            gzSupport.showTraceback()
            success = False
            gzSupport.logDatasetProcess("Serious error",sys.argv[0],False)
        finally:
            arcpy.SetProgressorPosition()
            arcpy.ClearWorkspaceCache_management(defaultWorkspace)
    if success == False:
        gzSupport.addError("Errors occurred during process, look in log files for more information")        
    if gzSupport.ignoreErrors == True:
        success = True
    if desc.workspaceType == "RemoteDatabase" and success == True:
        analyze(defaultWorkspace,datasets,tNames,tFullNames)
    arcpy.SetParameter(SUCCESS, success)

    arcpy.ClearWorkspaceCache_management(defaultWorkspace)
    gzSupport.compressGDB(gzSupport.workspace)
    gzSupport.compressGDB(defaultWorkspace)
    gzSupport.closeLog()
    return
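
Both main routines in Examples #5 and #6 are written to run as ArcGIS script tools and depend on module-level globals (targetWorkspace, defaultWorkspace, versionName, fieldNames, datasets, SUCCESS) being populated earlier in the script; assuming that setup has already run, the usual entry point is simply:

# Standard script-tool entry point; assumes the globals above are already set.
if __name__ == "__main__":
    main()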