Example #1
def main(argv = None):
    # main function - list the datasets and delete rows
    success = True
    try:
        names = gzSupport.listDatasets(sourceGDB)
        tNames = names[0]
        tFullNames = names[1]
        arcpy.SetProgressor("Step","Deleting rows...",0,len(tFullNames),1)
        i = 0
        for name in tFullNames:
            arcpy.SetProgressorPosition(i)
            arcpy.SetProgressorLabel(" Deleting rows in " + name + "...")
            # for each full name
            if len(datasetNames) == 0 or tNames[i].upper() in datasetNames:
                retVal = doTruncate(name)
                gzSupport.logDatasetProcess(name,"deleteRowsGDB",retVal)
                if retVal == False:
                    success = False
            else:
                gzSupport.addMessage("Skipping "  + tNames[i])
            i += 1
    except:
        gzSupport.showTraceback()
        gzSupport.addError("Failed to delete rows")
        success = False
        gzSupport.logDatasetProcess(name,"deleteRowsGDB",success)
    finally:
        arcpy.SetParameter(SUCCESS, success)
        arcpy.ResetProgressor()
        gzSupport.closeLog()
        arcpy.ClearWorkspaceCache_management(sourceGDB)
Example #2
def getDocument(dataset):
    gzSupport.addMessage(dataset)
    desc = arcpy.Describe(dataset)
    xmlDoc = Document()
    root = xmlDoc.createElement('table')
    xmlDoc.appendChild(root)
    root.setAttribute("xmlns",'http://gizinta.com')
    if desc.baseName.find('.') > -1:
        baseName = desc.baseName[desc.baseName.rfind('.')+1:]
    else:
        baseName = desc.baseName
        
    source = xmlDoc.createElement("data")
    source.setAttribute("name",baseName)
    root.appendChild(source)
    fields = getFields(dataset)
    i=0
    try:
        for field in fields:
            fNode = xmlDoc.createElement("row")
            fNode.setAttribute("id",str(i))
            source.appendChild(fNode)                
            addFieldElement(xmlDoc,fNode,"FieldName",field.name)
            addFieldElement(xmlDoc,fNode,"SourceField","")
            addFieldElement(xmlDoc,fNode,"SourceQA","Required") # need to get these values from template project.
            addFieldElement(xmlDoc,fNode,"TargetQA","Required")
            addFieldElement(xmlDoc,fNode,"SourceMethod","Copy")
            addFieldElement(xmlDoc,fNode,"FieldType",field.type)
            addFieldElement(xmlDoc,fNode,"FieldLength",str(field.length))
            i += 1
        xmlStr = xmlDoc.toxml()
    except:
        gzSupport.showTraceback()
        xmlStr =""
    return xmlStr
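
A stand-alone illustration of what getDocument() above emits may help: the function walks the arcpy field descriptions and writes a <table>/<data>/<row> structure. The sketch below avoids arcpy and gzSupport entirely; MockField, getDocumentSketch, and the local addFieldElement (mirroring the helper the code above assumes) are illustrative names, not part of gizinta.

# Stand-alone sketch (stdlib only) of the XML structure getDocument() builds.
from collections import namedtuple
from xml.dom.minidom import Document

MockField = namedtuple("MockField", "name type length")  # stand-in for an arcpy field description

def addFieldElement(xmlDoc, node, name, value):
    # assumed helper: append <name>value</name> under node
    elem = xmlDoc.createElement(name)
    elem.appendChild(xmlDoc.createTextNode(value))
    node.appendChild(elem)

def getDocumentSketch(baseName, fields):
    xmlDoc = Document()
    root = xmlDoc.createElement('table')
    xmlDoc.appendChild(root)
    root.setAttribute("xmlns", 'http://gizinta.com')
    source = xmlDoc.createElement("data")
    source.setAttribute("name", baseName)
    root.appendChild(source)
    for i, field in enumerate(fields):
        fNode = xmlDoc.createElement("row")
        fNode.setAttribute("id", str(i))
        source.appendChild(fNode)
        addFieldElement(xmlDoc, fNode, "FieldName", field.name)
        addFieldElement(xmlDoc, fNode, "FieldType", field.type)
        addFieldElement(xmlDoc, fNode, "FieldLength", str(field.length))
    return xmlDoc.toxml()

print(getDocumentSketch("Parcels", [MockField("PARCEL_ID", "String", 20)]))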
Example #3
def writeDocument(files, outputFileName):

    xmlDoc = Document()
    root = xmlDoc.createElement('GizintaPlaylist')
    xmlDoc.appendChild(root)
    root.setAttribute("logTableName", 'gzLog')
    root.setAttribute("errorTableName", 'gzError')
    root.setAttribute("fileExtension", '.dwg')
    root.setAttribute("xmlns:gizinta", 'http://gizinta.com')
    for fname in files:

        fElem = xmlDoc.createElement("File")
        root.appendChild(fElem)
        nodeText = xmlDoc.createTextNode(fname)
        fElem.appendChild(nodeText)
    try:
        xmlStr = xmlDoc.toprettyxml()
        uglyXml = xmlDoc.toprettyxml(indent='\t')
        text_re = re.compile(r'>\n\s+([^<>\s].*?)\n\s+</', re.DOTALL)
        prettyXml = text_re.sub(r'>\g<1></', uglyXml)

        fHandle = open(outputFileName, 'w')
        fHandle.write(prettyXml)
        fHandle.close()
    except:
        gzSupport.showTraceback()
        xmlStr = ""
    return xmlStr
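
The only non-obvious step in writeDocument() is the regular-expression cleanup of the toprettyxml() output, so a stdlib-only sketch of just that step follows. The playlist content is made up; on newer Python versions minidom may already keep text-only elements on one line, in which case the substitution simply matches nothing.

# Sketch of the whitespace fix used above: collapse an element whose text was
# pretty-printed onto its own indented line back to <File>name.dwg</File>.
import re

uglyXml = '<GizintaPlaylist>\n\t<File>\n\t\tsite_plan.dwg\n\t</File>\n</GizintaPlaylist>\n'
text_re = re.compile(r'>\n\s+([^<>\s].*?)\n\s+</', re.DOTALL)
prettyXml = text_re.sub(r'>\g<1></', uglyXml)
print(prettyXml)  # <File>site_plan.dwg</File> is now on a single line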
Example #4
def exportDataset(sourceLayer, targetName, dataset):
    result = True
    targetTable = os.path.join(gzSupport.workspace, targetName)
    gzSupport.addMessage("Exporting Layer from " + sourceLayer)
    whereClause = ""
    try:
        try:
            whereClause = gzSupport.getNodeValue(dataset, "WhereClause")
        except:
            whereClause = ""
        gzSupport.addMessage("Where '" + whereClause + "'")
        sourceName = sourceLayer[sourceLayer.rfind(os.sep) + 1 : sourceLayer.lower().rfind(".lyr")]
        viewName = sourceName + "_View"
        xmlFields = xmlDoc.getElementsByTagName("Field")
        view = gzSupport.makeFeatureViewForLayer(gzSupport.workspace, sourceLayer, viewName, whereClause, xmlFields)
        count = arcpy.GetCount_management(view).getOutput(0)
        gzSupport.addMessage(str(count) + " source rows")
        arcpy.FeatureClassToFeatureClass_conversion(view, gzSupport.workspace, targetName)
    except:
        err = "Failed to create new dataset " + targetName
        gzSupport.showTraceback()
        gzSupport.addError(err)
        gzSupport.logProcessError(sourceLayer, gzSupport.sourceIDField, sourceLayer, targetName, err)
        result = False
    return result
Example #5
def main(argv = None):
    # main function - list the datasets and delete rows
    success = True
    name = ''
    gzSupport.workspace = sourceGDB
    try:
        if len(datasetNames) == 0:
            names = gzSupport.listDatasets(sourceGDB)
            tNames = names[0]
        else:
            tNames = datasetNames
        arcpy.SetProgressor("Step","Deleting rows...",0,len(tNames),1)
        i = 0
        for name in tNames:
            arcpy.SetProgressorPosition(i)
            arcpy.SetProgressorLabel(" Deleting rows in " + name + "...")
            # for each full name
            if len(datasetNames) == 0 or gzSupport.nameTrimmer(name.upper()) in datasetNames:
                retVal = doTruncate(os.path.join(sourceGDB,name))
                gzSupport.logDatasetProcess("deleteRowsGDB",name,retVal)
                if retVal == False:
                    success = False
            else:
                gzSupport.addMessage("Skipping "  + gzSupport.nameTrimmer(name))
            i = i + 1
    except:
        gzSupport.showTraceback()
        gzSupport.addError("Failed to delete rows")
        success = False
        gzSupport.logDatasetProcess("deleteRowsGDB",name,success)
    finally:
        arcpy.SetParameter(SUCCESS, success)
        arcpy.ResetProgressor()
        gzSupport.closeLog()
        arcpy.ClearWorkspaceCache_management(sourceGDB)
Example #6
def main(argv = None):
    success = True
    targetName = ''
    try:
        if not arcpy.Exists(gzSupport.workspace):
            gzSupport.addMessage(gzSupport.workspace + " does not exist, attempting to create")
            gzSupport.createGizintaGeodatabase()
        else:
            gzSupport.compressGDB(gzSupport.workspace)
        if len(datasets) > 0:
            progBar = len(datasets) + 1
            arcpy.SetProgressor("step", "Importing Datasets...", 0,progBar, 1)
            #gzSupport.deleteExistingRows(datasets)
            arcpy.SetProgressorPosition()
        for dataset in datasets:
            gzSupport.sourceIDField = dataset.getAttributeNode("sourceIDField").nodeValue
            sourceName = dataset.getAttributeNode("sourceName").nodeValue
            targetName = dataset.getAttributeNode("targetName").nodeValue
            xmlFields = gzSupport.getXmlElements(gzSupport.xmlFileName,"Field")
            arcpy.SetProgressorLabel("Loading " + sourceName + " to " + targetName +"...")
            if not arcpy.Exists(os.path.join(sourceWorkspace,sourceName)):
                gzSupport.addError(os.path.join(sourceWorkspace,sourceName) + " does not exist, exiting")
                return
            if not arcpy.Exists(os.path.join(gzSupport.workspace,targetName)):
                gzSupport.addMessage(os.path.join(gzSupport.workspace,targetName) + " does not exist")
            else:
                arcpy.Delete_management(os.path.join(gzSupport.workspace,targetName))

            arcpy.env.workspace = gzSupport.workspace
            try:
                retVal = gzSupport.exportDataset(sourceWorkspace,sourceName,targetName,dataset, xmlFields)
                if retVal == False:
                    success = False
            except:
                gzSupport.showTraceback()
                success = False
                retVal = False
            gzSupport.logDatasetProcess(sourceName,targetName,retVal)
        arcpy.SetProgressorPosition()
    except:
        gzSupport.showTraceback()
        gzSupport.addError("A Fatal Error occurred")
        success = False
        gzSupport.logDatasetProcess("extractWorkspaceToGDB",targetName,False)
    finally:
        arcpy.ResetProgressor()
        arcpy.RefreshCatalog(gzSupport.workspace)
        try:
            arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
            arcpy.ClearWorkspaceCache_management(sourceWorkspace)
        except:
            gzSupport.addMessage("Unable to clear workspace cache, continuing")

    if success == False:
        gzSupport.addError("Errors occurred during process, look in log files for more information")
    if gzSupport.ignoreErrors == True:
        success = True

    gzSupport.closeLog()
    arcpy.SetParameter(SUCCESS, success)
Example #7
def main(argv=None):
    success = True
    gzSupport.compressGDB(gzSupport.workspace)
    arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    tables = gzSupport.listDatasets(gzSupport.workspace)
    tNames = tables[0]
    tFullNames = tables[1]
    name = ''

    for dataset in datasets:
        arcpy.env.workspace = gzSupport.workspace
        name = dataset.getAttributeNode("name").nodeValue
        table = gzSupport.getFullName(name, tNames, tFullNames)
        gzSupport.sourceIDField = dataset.getAttributeNode(
            "sourceIDField").nodeValue
        gzSupport.sourceNameField = dataset.getAttributeNode(
            "sourceNameField").nodeValue
        if not arcpy.Exists(table):
            gzSupport.addError("Feature Class " + table +
                               " does not exist, exiting")
            arcpy.SetParameter(SUCCESS, False)
            return
        if not arcpy.TestSchemaLock(table):
            gzSupport.addError("Unable to obtain a schema lock for " + table +
                               ", exiting")
            arcpy.SetParameter(SUCCESS, False)
            return -1
        desc = arcpy.Describe(table)
        fields = dataset.getElementsByTagName("Field")
        try:
            attrs = [f.name for f in arcpy.ListFields(table)]
            for field in fields:
                arcpy.env.workspace = gzSupport.workspace
                targetName = gzSupport.getNodeValue(field, "TargetName")
                gzSupport.addGizintaField(table, targetName, field, attrs)

            retVal = setFieldValues(table, fields)
            if retVal == False:
                success = False
            gzSupport.logDatasetProcess(name, "Fields", retVal)
            arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
            gzSupport.cleanupGarbage()

        except:
            gzSupport.showTraceback()
            success = False
            gzSupport.logDatasetProcess("fieldCalculator", name, False)
        finally:
            arcpy.RefreshCatalog(table)
            arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    if success == False:
        gzSupport.addError(
            "Errors occurred during process, look in log file tools\\log\\fieldCalculator.log for more information"
        )
    if gzSupport.ignoreErrors == True:
        success = True
    arcpy.SetParameter(SUCCESS, success)
    arcpy.ResetProgressor()
    gzSupport.closeLog()
    return
Example #8
def main(argv=None):
    # main function - list the datasets and delete rows
    success = True
    try:
        names = gzSupport.listDatasets(sourceGDB)
        tNames = names[0]
        tFullNames = names[1]
        arcpy.SetProgressor("Step", "Deleting rows...", 0, len(tFullNames), 1)
        i = 0
        for name in tFullNames:
            arcpy.SetProgressorPosition(i)
            arcpy.SetProgressorLabel(" Deleting rows in " + name + "...")
            # for each full name
            if len(datasetNames) == 0 or tNames[i].upper() in datasetNames:
                retVal = doTruncate(name)
                gzSupport.logDatasetProcess(name, "deleteRowsGDB", retVal)
                if retVal == False:
                    success = False
            else:
                gzSupport.addMessage("Skipping " + tNames[i])
            i += 1
    except:
        gzSupport.showTraceback()
        gzSupport.addError("Failed to delete rows")
        success = False
        gzSupport.logDatasetProcess(name, "deleteRowsGDB", success)
    finally:
        arcpy.SetParameter(SUCCESS, success)
        arcpy.ResetProgressor()
        gzSupport.closeLog()
        arcpy.ClearWorkspaceCache_management(sourceGDB)
Example #9
def writeDocument(files,outputFileName):

    xmlDoc = Document()
    root = xmlDoc.createElement('GizintaPlaylist')
    xmlDoc.appendChild(root)
    root.setAttribute("logTableName",'gzLog')
    root.setAttribute("errorTableName",'gzError')
    root.setAttribute("fileExtension",'.dwg')
    root.setAttribute("xmlns:gizinta",'http://gizinta.com')
    for fname in files:

        fElem = xmlDoc.createElement("File")
        root.appendChild(fElem)
        nodeText = xmlDoc.createTextNode(fname)
        fElem.appendChild(nodeText)
    try:
        xmlStr = xmlDoc.toprettyxml()
        uglyXml = xmlDoc.toprettyxml(indent='\t')
        text_re = re.compile(r'>\n\s+([^<>\s].*?)\n\s+</', re.DOTALL)
        prettyXml = text_re.sub(r'>\g<1></', uglyXml)

        fHandle = open(outputFileName, 'w')
        fHandle.write(prettyXml)
        fHandle.close()
    except:
        gzSupport.showTraceback()
        xmlStr =""
    return xmlStr
Example #10
def main(argv = None):
    success = True
    gzSupport.compressGDB(gzSupport.workspace)
    tables = gzSupport.listDatasets(gzSupport.workspace)
    tNames = tables[0]
    tFullNames = tables[1]

    if len(datasets) > 0:
        progBar = len(datasets)
        arcpy.SetProgressor("step", "Running QA...", 0,progBar, 1)
    for dataset in datasets:
        arcpy.env.workspace = gzSupport.workspace
        name = dataset.getAttributeNode("name").nodeValue
        gzSupport.sourceIDField = dataset.getAttributeNode("sourceIDField").nodeValue
        table = gzSupport.getFullName(name,tNames, tFullNames)
        #table = os.path.join(gzSupport.workspace,name)
        fields = dataset.getElementsByTagName("Field")
        try:
            # run qa for dataset
            qaRulesDataset = dataset.getAttributeNode("qa").nodeValue
            gzSupport.addMessage("\nRunning QA (" + qaRulesDataset + ") for " + name)
            retVal = runDatasetChecks(dataset,table,qaRulesDataset)
            if retVal == False:
                success = False

            for field in fields:
                sourceQA = False
                targetQA = False
                fieldName = gzSupport.getNodeValue(field,"TargetName")
                if sourceFieldQA.lower() == "true" and qaRulesDataset.find("CheckFields") > -1:
                    sourceQA = True
                    fieldName = gzSupport.getNodeValue(field,"SourceName")
                if targetFieldQA.lower() == "true" and qaRulesDataset.find("CheckFields") > -1:
                    targetQA = True
                    fieldName = gzSupport.getNodeValue(field,"TargetName")
                retVal = runFieldCheck(dataset,table,field,sourceQA,targetQA)
                if retVal == False:
                    success = False
                try:
                    gzSupport.logDatasetProcess(name,fieldName,retVal)
                except:
                    gzSupport.addMessage("Process not logged for field")
            arcpy.SetProgressorPosition()
        except:
            gzSupport.showTraceback()
            gzSupport.addError("Field Check Error")
            success = False
            gzSupport.logDatasetProcess("sourceTargetQA",name,False)
        finally:
            arcpy.ResetProgressor()
            arcpy.RefreshCatalog(table)
            arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    if success == False:
        gzSupport.addError("Errors occurred during process, look in log file tools\\log\\sourceTargetQA.log for more information")
    if gzSupport.ignoreErrors == True:
        success = True
    arcpy.SetParameter(SUCCESS, success)
    gzSupport.closeLog()
    return
Example #11
def main(argv=None):
    success = True
    name = ''
    try:
        if not arcpy.Exists(gzSupport.workspace):
            gzSupport.addMessage(gzSupport.workspace +
                                 " does not exist, attempting to create")
            gzSupport.createGizintaGeodatabase()
        else:
            gzSupport.compressGDB(gzSupport.workspace)
        if len(datasets) > 0:
            progBar = len(datasets) + 1
            arcpy.SetProgressor("step", "Importing Layers...", 0, progBar, 1)
            arcpy.SetProgressorPosition()
        for dataset in datasets:
            gzSupport.sourceIDField = dataset.getAttributeNode(
                "sourceIDField").nodeValue
            sourceName = dataset.getAttributeNode("sourceName").nodeValue
            targetName = dataset.getAttributeNode("targetName").nodeValue
            arcpy.SetProgressorLabel("Loading " + sourceName + " to " +
                                     targetName + "...")
            if not arcpy.Exists(sourceLayer):
                gzSupport.addError("Layer " + sourceLayer +
                                   " does not exist, exiting")
                return
            target = os.path.join(gzSupport.workspace, targetName)
            arcpy.env.workspace = gzSupport.workspace
            if not arcpy.Exists(target):
                gzSupport.addMessage("Feature Class " + target +
                                     " does not exist")
            else:
                arcpy.Delete_management(target)
            try:
                retVal = exportDataset(sourceLayer, targetName, dataset)
                if retVal == False:
                    success = False
            except:
                gzSupport.showTraceback()
                success = False
                retVal = False
            gzSupport.logDatasetProcess(sourceName, targetName, retVal)
        arcpy.SetProgressorPosition()
    except:
        gzSupport.addError("A Fatal Error occurred")
        gzSupport.showTraceback()
        success = False
        gzSupport.logDatasetProcess("extractLayerToGDB", name, False)
    finally:
        arcpy.ResetProgressor()
        arcpy.RefreshCatalog(gzSupport.workspace)
        arcpy.ClearWorkspaceCache_management(gzSupport.workspace)

    if success == False:
        gzSupport.addError(
            "Errors occurred during process, look in log files for more information"
        )
    if gzSupport.ignoreErrors == True:
        success = True
    gzSupport.closeLog()
    arcpy.SetParameter(SUCCESS, success)
Example #12
def exportDataset(sourceLayer, targetName, dataset):
    result = True
    targetTable = os.path.join(gzSupport.workspace, targetName)
    gzSupport.addMessage("Exporting Layer from " + sourceLayer)
    whereClause = ""
    try:
        try:
            whereClause = gzSupport.getNodeValue(dataset, "WhereClause")
        except:
            whereClause = ''
        gzSupport.addMessage("Where '" + whereClause + "'")
        sourceName = sourceLayer[sourceLayer.rfind(os.sep) +
                                 1:sourceLayer.lower().rfind(".lyr")]
        viewName = sourceName + "_View"
        xmlFields = xmlDoc.getElementsByTagName("Field")
        view = gzSupport.makeFeatureViewForLayer(gzSupport.workspace,
                                                 sourceLayer, viewName,
                                                 whereClause, xmlFields)
        count = arcpy.GetCount_management(view).getOutput(0)
        gzSupport.addMessage(str(count) + " source rows")
        arcpy.FeatureClassToFeatureClass_conversion(view, gzSupport.workspace,
                                                    targetName)
    except:
        err = "Failed to create new dataset " + targetName
        gzSupport.showTraceback()
        gzSupport.addError(err)
        gzSupport.logProcessError(sourceLayer, gzSupport.sourceIDField,
                                  sourceLayer, targetName, err)
        result = False
    return result
Example #13
def main(argv = None):
    # main function - list the source and target datasets, then delete rows/append where there is a match on non-prefixed name
    arcpy.AddToolbox(os.path.join(os.path.dirname(sys.path[0]),"Gizinta.tbx"))
    success = True
    try:

        gzSupport.addMessage("Getting list of datasets for Target " + targetGDB)
        targets = gzSupport.listDatasets(targetGDB)
        tNames = targets[0]
        tFullNames = targets[1]

        gzSupport.addMessage("Getting list of datasets for Source " + sourceGDB)
        sources = gzSupport.listDatasets(sourceGDB)
        sNames = sources[0]
        sFullNames = sources[1]

        t = 0
        arcpy.SetProgressor("Step","Creating Files...",0,len(tNames),1)
        
        for name in tNames:
            arcpy.SetProgressorPosition(t)
            arcpy.SetProgressorLabel("Creating file for " + name + "...")
            # for each source name
            if debug:
                gzSupport.addMessage(name)
            try:
                # look for the matching name in target names
                s = sNames.index(name)
            except:
                # will get here if no match
                s = -1
            if s > -1:
                # create file if there is a match
                fileName = outputFolder + os.sep + prefixStr + name.title() + ".xml"
                if os.path.exists(fileName):
                    os.remove(fileName)
                try:
                    arcpy.gzCreateProject_gizinta(sFullNames[s],tFullNames[t],fileName)
                    retVal = True
                    gzSupport.addMessage("Created "  + fileName)
                except:
                    retVal = False
                if retVal == False:                    
                    gzSupport.addMessage("Failed to create file for "  + name)
                    gzSupport.showTraceback()
                    success = False
            else:
                gzSupport.addMessage("Skipping "  + name)
            t = t + 1
    except:
        gzSupport.showTraceback()
        arcpy.AddError("Error creating project files")
        success = False

    finally:
        arcpy.ResetProgressor()
        arcpy.SetParameter(gzSupport.successParameterNumber, success)
        arcpy.env.workspace = targetGDB
        arcpy.RefreshCatalog(outputFolder)
        gzSupport.closeLog()
Example #14
def getDocument(dataset):
    gzSupport.addMessage(dataset)
    desc = arcpy.Describe(dataset)
    xmlDoc = Document()
    root = xmlDoc.createElement('table')
    xmlDoc.appendChild(root)
    root.setAttribute("xmlns",'http://gizinta.com')
    if desc.baseName.find('.') > -1:
        baseName = desc.baseName[desc.baseName.rfind('.')+1:]
    else:
        baseName = desc.baseName

    source = xmlDoc.createElement("data")
    source.setAttribute("name",baseName)
    root.appendChild(source)
    fields = getFields(dataset)
    i=0
    try:
        for field in fields:
            fNode = xmlDoc.createElement("row")
            fNode.setAttribute("id",str(i))
            source.appendChild(fNode)
            addFieldElement(xmlDoc,fNode,"FieldName",field.name)
            addFieldElement(xmlDoc,fNode,"SourceField","")
            addFieldElement(xmlDoc,fNode,"SourceQA","Required") # need to get these values from template project.
            addFieldElement(xmlDoc,fNode,"TargetQA","Required")
            addFieldElement(xmlDoc,fNode,"SourceMethod","Copy")
            addFieldElement(xmlDoc,fNode,"FieldType",field.type)
            addFieldElement(xmlDoc,fNode,"FieldLength",str(field.length))
            i += 1
        xmlStr = xmlDoc.toxml()
    except:
        gzSupport.showTraceback()
        xmlStr =""
    return xmlStr
Example #15
def importLayer(cadPath, cadName, dataset):
    result = False
    try:
        name = dataset.getAttributeNode("targetName").nodeValue
    except:
        name = dataset.getAttributeNode("name").nodeValue

    table = os.path.join(gzSupport.workspace, name)
    layerName = dataset.getAttributeNode("sourceName").nodeValue
    layer = os.path.join(cadPath, cadName, layerName)
    gzSupport.addMessage("Importing Layer " + layer)

    try:
        whereClause = gzSupport.getNodeValue(dataset, "WhereClause")
        xmlFields = dataset.getElementsByTagName("Field")
        gzSupport.addMessage("Where " + whereClause)
        if not arcpy.Exists(table):
            err = "Feature Class " + name + " does not exist"
            gzSupport.addError(err)
            gzSupport.logProcessError(cadName, gzSupport.sourceIDField, name,
                                      name, err)
            return False
        if whereClause != '':
            view = gzSupport.makeFeatureView(gzSupport.workspace, layer,
                                             layerName + "_View", whereClause,
                                             xmlFields)
        else:
            view = layer
        count = arcpy.GetCount_management(view).getOutput(0)
        gzSupport.addMessage(str(count) + " source Features for " + name)

        if hasJoinTo(dataset) == True:
            res = joinToCsv(view, dataset, cadPath, cadName)
            result = res[0]
            view = res[1]
        else:
            view = view
            result = True

        if result == True and int(count) > 0:
            arcpy.Append_management([view], table, "NO_TEST", "", "")
            arcpy.ClearWorkspaceCache_management(gzSupport.workspace)

    except:
        err = "Failed to import layer " + name
        gzSupport.addError(err)
        gzSupport.showTraceback()
        gzSupport.logProcessError(cadName, gzSupport.sourceIDField, name,
                                  layerName, err)
    gzSupport.cleanupGarbage()
    try:
        del view
    except:
        gzSupport.addMessage("")
    return result
Example #16
def main(argv = None):
    success = True
    gzSupport.compressGDB(gzSupport.workspace)
    arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    tables = gzSupport.listDatasets(gzSupport.workspace)
    tNames = tables[0]
    tFullNames = tables[1]
    name = ''
    
    for dataset in datasets:
        arcpy.env.workspace = gzSupport.workspace
        name = dataset.getAttributeNode("name").nodeValue
        table = gzSupport.getFullName(name,tNames,tFullNames)
        gzSupport.sourceIDField = dataset.getAttributeNode("sourceIDField").nodeValue
        gzSupport.sourceNameField = dataset.getAttributeNode("sourceNameField").nodeValue
        if not arcpy.Exists(table):
            gzSupport.addError("Feature Class " + table + " does not exist, exiting")
            arcpy.SetParameter(SUCCESS, False)
            return
        if not arcpy.TestSchemaLock(table):
            gzSupport.addError("Unable to obtain a schema lock for " + table + ", exiting")
            arcpy.SetParameter(SUCCESS, False)
            return -1
        desc = arcpy.Describe(table)
        fields = dataset.getElementsByTagName("Field")
        try:
            attrs = [f.name for f in arcpy.ListFields(table)]
            for field in fields:
                arcpy.env.workspace = gzSupport.workspace
                targetName = gzSupport.getNodeValue(field,"TargetName")
                gzSupport.addGizintaField(table,targetName,field,attrs)

            retVal = setFieldValues(table,fields)
            if retVal == False:
                success = False
            gzSupport.logDatasetProcess(name,"Fields",retVal)
            arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
            gzSupport.cleanupGarbage()

        except:
            gzSupport.showTraceback()
            success = False
            gzSupport.logDatasetProcess("fieldCalculator",name,False)
        finally:
            arcpy.RefreshCatalog(table)
            arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    if success == False:
        gzSupport.addError("Errors occurred during process, look in log file tools\\log\\fieldCalculator.log for more information")
    if gzSupport.ignoreErrors == True:
        success = True
    arcpy.SetParameter(SUCCESS, success)
    arcpy.ResetProgressor()
    gzSupport.closeLog()
    return
Example #17
def main(argv = None):
    success = True
    name = ''
    try:
        if not arcpy.Exists(gzSupport.workspace):
            gzSupport.addMessage(gzSupport.workspace + " does not exist, attempting to create")
            gzSupport.createGizintaGeodatabase()
        else:
            gzSupport.compressGDB(gzSupport.workspace)
        if len(datasets) > 0:
            progBar = len(datasets) + 1
            arcpy.SetProgressor("step", "Importing Layers...", 0,progBar, 1)
            arcpy.SetProgressorPosition()
        for dataset in datasets:
            gzSupport.sourceIDField = dataset.getAttributeNode("sourceIDField").nodeValue
            sourceName = dataset.getAttributeNode("sourceName").nodeValue
            targetName = dataset.getAttributeNode("targetName").nodeValue
            arcpy.SetProgressorLabel("Loading " + sourceName + " to " + targetName +"...")
            if not arcpy.Exists(sourceLayer):
                gzSupport.addError("Layer " + sourceLayer + " does not exist, exiting")
                return
            target = os.path.join(gzSupport.workspace,targetName)
            arcpy.env.workspace = gzSupport.workspace
            if not arcpy.Exists(target):
                gzSupport.addMessage("Feature Class " + target + " does not exist")
            else:
                arcpy.Delete_management(target)
            try:
                retVal = exportDataset(sourceLayer,targetName,dataset)
                if retVal == False:
                    success = False
            except:
                gzSupport.showTraceback()
                success = False
                retVal = False
            gzSupport.logDatasetProcess(sourceName,targetName,retVal)
        arcpy.SetProgressorPosition()
    except:
        gzSupport.addError("A Fatal Error occurred")
        gzSupport.showTraceback()
        success = False
        gzSupport.logDatasetProcess("extractLayerToGDB",name,False)
    finally:
        arcpy.ResetProgressor()
        arcpy.RefreshCatalog(gzSupport.workspace)
        arcpy.ClearWorkspaceCache_management(gzSupport.workspace)

    if success == False:
        gzSupport.addError("Errors occurred during process, look in log files for more information")
    if gzSupport.ignoreErrors == True:
        success = True
    gzSupport.closeLog()
    arcpy.SetParameter(SUCCESS, success)
Example #18
def importLayer(cadPath, cadName, dataset):
    result = False
    try:
        name = dataset.getAttributeNode("targetName").nodeValue
    except:
        name = dataset.getAttributeNode("name").nodeValue

    table = os.path.join(gzSupport.workspace, name)
    layerName = dataset.getAttributeNode("sourceName").nodeValue
    layer = os.path.join(cadPath, cadName, layerName)
    gzSupport.addMessage("Importing Layer " + layer)

    try:
        whereClause = gzSupport.getNodeValue(dataset, "WhereClause")
        xmlFields = dataset.getElementsByTagName("Field")
        gzSupport.addMessage("Where " + whereClause)
        if not arcpy.Exists(table):
            err = "Feature Class " + name + " does not exist"
            gzSupport.addError(err)
            gzSupport.logProcessError(cadName, gzSupport.sourceIDField, name, name, err)
            return False
        if whereClause != "":
            view = gzSupport.makeFeatureView(gzSupport.workspace, layer, layerName + "_View", whereClause, xmlFields)
        else:
            view = layer
        count = arcpy.GetCount_management(view).getOutput(0)
        gzSupport.addMessage(str(count) + " source Features for " + name)

        if hasJoinTo(dataset) == True:
            res = joinToCsv(view, dataset, cadPath, cadName)
            result = res[0]
            view = res[1]
        else:
            view = view
            result = True

        if result == True and int(count) > 0:
            arcpy.Append_management([view], table, "NO_TEST", "", "")
            arcpy.ClearWorkspaceCache_management(gzSupport.workspace)

    except:
        err = "Failed to import layer " + name
        gzSupport.addError(err)
        gzSupport.showTraceback()
        gzSupport.logProcessError(cadName, gzSupport.sourceIDField, name, layerName, err)
    gzSupport.cleanupGarbage()
    try:
        del view
    except:
        gzSupport.addMessage("")
    return result
Example #19
def main(argv = None):
    success = True
    gzSupport.compressGDB(gzSupport.workspace)
    if len(datasets) > 0:
        progBar = len(datasets)
        arcpy.SetProgressor("step", "Running QA...", 0,progBar, 1) 
    for dataset in datasets:
        arcpy.env.workspace = gzSupport.workspace
        name = dataset.getAttributeNode("name").nodeValue
        gzSupport.sourceIDField = dataset.getAttributeNode("sourceIDField").nodeValue
        table = os.path.join(gzSupport.workspace,name)
        fields = dataset.getElementsByTagName("Field")
        try:
            # run qa for dataset
            qaRulesDataset = dataset.getAttributeNode("qa").nodeValue
            gzSupport.addMessage("\nRunning QA (" + qaRulesDataset + ") for " + name)
            retVal = runDatasetChecks(dataset,table,qaRulesDataset)
            if retVal == False:
                success = False
            
            for field in fields:
                sourceQA = False
                targetQA = False
                fieldName = gzSupport.getNodeValue(field,"TargetName")
                if sourceFieldQA.lower() == "true" and qaRulesDataset.find("CheckFields") > -1:
                    sourceQA = True
                    fieldName = gzSupport.getNodeValue(field,"SourceName")
                if targetFieldQA.lower() == "true" and qaRulesDataset.find("CheckFields") > -1:
                    targetQA = True
                    fieldName = gzSupport.getNodeValue(field,"TargetName")
                retVal = runFieldCheck(dataset,table,field,sourceQA,targetQA)
                if retVal == False:
                    success = False
                gzSupport.logDatasetProcess(name,fieldName,retVal)
            arcpy.SetProgressorPosition()
        except:
            gzSupport.showTraceback()
            gzSupport.addError("Field Check Error")
            success = False
            gzSupport.logDatasetProcess(name,"",False)
        finally:
            arcpy.ResetProgressor()
            arcpy.RefreshCatalog(table)
            arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    if success == False:
        gzSupport.addError("Errors occurred during process, look in log files for more information")        
    if gzSupport.ignoreErrors == True:
        success = True
    arcpy.SetParameter(SUCCESS, success)
    gzSupport.closeLog()
    return
Example #20
def main(argv=None):
    # main function - list the source and target datasets, then append where there is a match on non-prefixed name
    success = True
    arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    try:
        sources = gzSupport.listDatasets(sourceGDB)
        sNames = sources[0]
        sFullNames = sources[1]
        targets = gzSupport.listDatasets(targetGDB)
        tNames = targets[0]
        tFullNames = targets[1]
        s = 0
        arcpy.SetProgressor("Step", "Appending rows...", 0, len(sFullNames), 1)
        for name in sNames:
            arcpy.SetProgressorPosition(s)
            arcpy.SetProgressorLabel(" Deleting rows in " + name + "...")
            # for each source name
            if debug:
                gzSupport.addMessage(name)
            try:
                # look for the matching name in target names
                t = tNames.index(name)
            except:
                # will get here if no match
                t = -1
            if t > -1:
                # append if there is a match
                if len(datasetNames) == 0 or name.upper() in datasetNames:
                    retVal = doAppend(sFullNames[s], tFullNames[t])
                    gzSupport.logDatasetProcess(name, "appendAlltoGDB", retVal)
                    if retVal == False:
                        success = False
                else:
                    gzSupport.addMessage("Skipping " + name)

            s = s + 1
    except:
        gzSupport.showTraceback()
        gzSupport.addError("Unable to append datasets")
        success = False
        gzSupport.logDatasetProcess(name, "appendAlltoGDB", success)
    finally:
        arcpy.SetParameter(SUCCESS, success)
        arcpy.ResetProgressor()
        gzSupport.closeLog()
        arcpy.ClearWorkspaceCache_management(targetGDB)
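
The pairing logic above is plain Python: each source name is looked up in the target name list with index(), and the ValueError raised when there is no match is treated as "skip". A minimal, arcpy-free sketch of that pattern, with made-up dataset names:

# Match source datasets to targets by short name; -1 means no match.
sNames = ["parcels", "roads", "hydrants"]
sFullNames = ["src.gdb\\parcels", "src.gdb\\roads", "src.gdb\\hydrants"]
tNames = ["parcels", "hydrants"]
tFullNames = ["tgt.gdb\\parcels", "tgt.gdb\\hydrants"]

for s, name in enumerate(sNames):
    try:
        t = tNames.index(name)  # look for the matching name in target names
    except ValueError:
        t = -1                  # no match
    if t > -1:
        print("append " + sFullNames[s] + " -> " + tFullNames[t])
    else:
        print("Skipping " + name)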
Example #21
def main(argv = None):
    # main function - list the source and target datasets, then append where there is a match on non-prefixed name
    success = True
    name = ''
    arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    try:
        if len(datasetNames) == 0:
            sources = gzSupport.listDatasets(sourceGDB)
            sNames = sources[0]
            sFullNames = sources[1]
            targets = gzSupport.listDatasets(targetGDB)
            tNames = targets[0]
            tFullNames = targets[1]
        else:
            sNames = datasetNames
        
        s = 0
        arcpy.SetProgressor("Step","Appending rows...",0,len(sNames),1)
        for name in sNames:
            arcpy.SetProgressorPosition(s)
            arcpy.SetProgressorLabel(" Appending rows in " + name + "...")
            # for each source name
            if debug:
                gzSupport.addMessage(name)
            target = os.path.join(targetGDB,name)
            if arcpy.Exists(target):
                # append if there is a match
                if len(datasetNames) == 0 or gzSupport.nameTrimmer(name) in datasetNames:
                    retVal = doAppend(os.path.join(sourceGDB,name),target)
                    gzSupport.logDatasetProcess("appendAlltoGDB",name,retVal)
                    if retVal == False:
                        success = False
                else:
                    gzSupport.addMessage("Skipping "  + gzSupport.nameTrimmer(name))

            s = s + 1
    except:
        gzSupport.showTraceback()
        gzSupport.addError("Unable to append datasets")
        success = False
        gzSupport.logDatasetProcess("appendAlltoGDB",name,success)
    finally:
        arcpy.SetParameter(SUCCESS, success)
        arcpy.ResetProgressor()
        gzSupport.closeLog()
        arcpy.ClearWorkspaceCache_management(targetGDB)
Example #22
def main(argv = None):
    # main function - list the source and target datasets, then append where there is a match on non-prefixed name
    success = True
    arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    try:
        sources = gzSupport.listDatasets(sourceGDB)
        sNames = sources[0]
        sFullNames = sources[1]
        targets = gzSupport.listDatasets(targetGDB)
        tNames = targets[0]
        tFullNames = targets[1]
        s = 0
        arcpy.SetProgressor("Step","Appending rows...",0,len(sFullNames),1)
        for name in sNames:
            arcpy.SetProgressorPosition(s)
            arcpy.SetProgressorLabel(" Deleting rows in " + name + "...")
            # for each source name
            if debug:
                gzSupport.addMessage(name)
            try:
                # look for the matching name in target names
                t = tNames.index(name)
            except:
                # will get here if no match
                t = -1
            if t > -1:
                # append if there is a match
                if len(datasetNames) == 0 or gzSupport.nameTrimmer(name) in datasetNames:
                    retVal = doAppend(sFullNames[s],tFullNames[t])
                    gzSupport.logDatasetProcess(name,"appendAlltoGDB",retVal)
                    if retVal == False:
                        success = False
                else:
                    gzSupport.addMessage("Skipping "  + gzSupport.nameTrimmer(name))

            s = s + 1
    except:
        gzSupport.showTraceback()
        gzSupport.addError("Unable to append datasets")
        success = False
        gzSupport.logDatasetProcess(name,"appendAlltoGDB",success)
    finally:
        arcpy.SetParameter(SUCCESS, success)
        arcpy.ResetProgressor()
        gzSupport.closeLog()
        arcpy.ClearWorkspaceCache_management(targetGDB)
Example #23
def createView(viewName,sql):
    view = os.path.join(sde,viewName)
    printmsg(view)
    sql = sql[sql.find("AS SELECT ")+3:]
    retVal = False
    if arcpy.Exists(view) and recreate == False:
        printmsg("View already exists " + viewName)
    else:
        try:
            arcpy.CreateDatabaseView_management(sde,viewName,sql)
            retVal = True
            msg("-- View created" + view)
        except:
            printmsg("Failed to Create View")
            gzSupport.showTraceback()
            retVal = False
    return retVal
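
Note on the slicing in createView(): find("AS SELECT ") returns the index where "AS SELECT " starts, and adding 3 skips past the three characters "AS " so the slice begins at the SELECT keyword. A quick stand-alone check with a made-up statement:

# Keep only the SELECT portion of a CREATE VIEW ... AS SELECT ... statement.
sql = "CREATE VIEW parcel_view AS SELECT PARCEL_ID, OWNER FROM parcels"
selectPart = sql[sql.find("AS SELECT ") + 3:]
print(selectPart)  # SELECT PARCEL_ID, OWNER FROM parcels
# if "AS SELECT " were absent, find() would return -1 and the slice would
# silently start at index 2, so callers must pass full view definitions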
Example #24
def calcValue(row,attrs,calcString):
    # calculate a value based on fields and or other expressions
    if calcString.find("|") > -1:
        calcList = calcString.split("|")
    else:
        calcList = calcString.split("!")
    outVal = ""
    for strVal in calcList:
        if strVal in attrs:
            outVal += str(row.getValue(strVal))
        else:    
            outVal += strVal
    try:
        outVal = eval(outVal)
    except:
        gzSupport.addMessage("Error evaluating:" + outVal)
        gzSupport.showTraceback()
        gzSupport.addError("Error calculating field values:" + outVal)    
    return outVal
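
calcValue() assembles an expression string by substituting row values for tokens that match field names (delimited by "|" or "!") and then hands the result to eval(). The arcpy-free sketch below reproduces that behavior; MockRow, the field names, and the expression are illustrative only.

# Stand-alone sketch of the token substitution and eval() used by calcValue().
class MockRow(object):
    def __init__(self, values):
        self._values = values
    def getValue(self, field):
        return self._values[field]

row = MockRow({"WIDTH": 3, "HEIGHT": 4})
attrs = ["WIDTH", "HEIGHT"]
calcString = "!WIDTH! * !HEIGHT!"

calcList = calcString.split("|") if calcString.find("|") > -1 else calcString.split("!")
outVal = ""
for strVal in calcList:
    outVal += str(row.getValue(strVal)) if strVal in attrs else strVal
print(outVal)        # 3 * 4
print(eval(outVal))  # 12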
Example #25
def calcValue(row, attrs, calcString):
    # calculate a value based on fields and or other expressions
    if calcString.find("|") > -1:
        calcList = calcString.split("|")
    else:
        calcList = calcString.split("!")
    outVal = ""
    for strVal in calcList:
        if strVal in attrs:
            outVal += str(row.getValue(strVal))
        else:
            outVal += strVal
    try:
        outVal = eval(outVal)
    except:
        gzSupport.addMessage("Error evaluating:" + outVal)
        gzSupport.showTraceback()
        gzSupport.addError("Error calculating field values:" + outVal)
    return outVal
Example #26
def checkGeometry(table):

    try:
        errTable = table + "_Check"
        if arcpy.Exists(errTable):
            arcpy.Delete_management(errTable)
            gzSupport.addMessage("Deleted existing " + errTable)

        arcpy.CheckGeometry_management(table,errTable)
        count = int(arcpy.GetCount_management(errTable).getOutput(0))
        if count == 0:
            gzSupport.addMessage("No Geometry Errors found")
            arcpy.Delete_management(errTable)
        else:
            gzSupport.addMessage(str(count) + " Errors located in " + errTable)
    except:
        gzSupport.showTraceback()
        gzSupport.addMessage("Unable to perform geometry check, see error listed above")
        count = 0

    return count
Example #27
def joinToCsv(view, dataset, cadPath, cadName):
    retVal = False
    joinTo = ""
    if hasJoinTo(dataset) == True:
        try:
            joinTo = dataset.getAttributeNode("joinTo").nodeValue
            cadPart0 = cadName.split(".dwg")[0]
            csvFile = os.path.join(cadPath, cadPart0, cadPart0 + joinTo)
            if joinTo and joinTo != "":
                cadKey = dataset.getAttributeNode("cadKey").nodeValue
                csvKey = dataset.getAttributeNode("csvKey").nodeValue
                prefix = dataset.getAttributeNode("fieldPrefix").nodeValue
                tempTable = os.path.join(gzSupport.workspace, prefix)
                # Create temporary table
                if arcpy.Exists(tempTable):
                    arcpy.Delete_management(tempTable)
                if os.path.isfile(csvFile) == True:
                    arcpy.CopyRows_management(csvFile, tempTable)
                    arcpy.AddJoin_management(view, cadKey, tempTable, csvKey)
                    retVal = True
                else:
                    err = "Missing csv file - " + csvFile
                    gzSupport.addError(err)
                    gzSupport.logProcessError(cadName, gzSupport.sourceIDField,
                                              name, csvFile, err)

                    retVal = False
        except:
            err = "Unable to create join for " + name + ", " + csvFile
            gzSupport.logProcessError(cadName, gzSupport.sourceIDField, name,
                                      csvFile, err)
            gzSupport.addError(err)
            gzSupport.showTraceback()
            retVal = False
        #finally:
        #    if arcpy.Exists(tempTable):
        #        arcpy.Delete_management(tempTable)

    return [retVal, view]
Example #28
def checkGeometry(table):

    try:
        errTable = table + "_Check"
        if arcpy.Exists(errTable):
            arcpy.Delete_management(errTable)
            gzSupport.addMessage("Deleted existing " + errTable)

        arcpy.CheckGeometry_management(table, errTable)
        count = int(arcpy.GetCount_management(errTable).getOutput(0))
        if count == 0:
            gzSupport.addMessage("No Geometry Errors found")
            arcpy.Delete_management(errTable)
        else:
            gzSupport.addMessage(str(count) + " Errors located in " + errTable)
    except:
        gzSupport.showTraceback()
        gzSupport.addMessage(
            "Unable to perform geometry check, see error listed above")
        count = 0

    return count
Example #29
def joinToCsv(view, dataset, cadPath, cadName):
    retVal = False
    joinTo = ""
    if hasJoinTo(dataset) == True:
        try:
            joinTo = dataset.getAttributeNode("joinTo").nodeValue
            cadPart0 = cadName.split(".dwg")[0]
            csvFile = os.path.join(cadPath, cadPart0, cadPart0 + joinTo)
            if joinTo and joinTo != "":
                cadKey = dataset.getAttributeNode("cadKey").nodeValue
                csvKey = dataset.getAttributeNode("csvKey").nodeValue
                prefix = dataset.getAttributeNode("fieldPrefix").nodeValue
                tempTable = os.path.join(gzSupport.workspace, prefix)
                # Create temporary table
                if arcpy.Exists(tempTable):
                    arcpy.Delete_management(tempTable)
                if os.path.isfile(csvFile) == True:
                    arcpy.CopyRows_management(csvFile, tempTable)
                    arcpy.AddJoin_management(view, cadKey, tempTable, csvKey)
                    retVal = True
                else:
                    err = "Missing csv file - " + csvFile
                    gzSupport.addError(err)
                    gzSupport.logProcessError(cadName, gzSupport.sourceIDField, name, csvFile, err)

                    retVal = False
        except:
            err = "Unable to create join for " + name + ", " + csvFile
            gzSupport.logProcessError(cadName, gzSupport.sourceIDField, name, csvFile, err)
            gzSupport.addError(err)
            gzSupport.showTraceback()
            retVal = False
        # finally:
        #    if arcpy.Exists(tempTable):
        #        arcpy.Delete_management(tempTable)

    return [retVal, view]
Example #30
def main(argv = None):
    global targetWorkspace
    hasVersion = False
    desc = arcpy.Describe(gzSupport.workspace)
    if desc.workspaceType != "RemoteDatabase" and versionName == None:
        targetWorkspace = defaultWorkspace
    success = True
    arcpy.ResetProgressor()
    arcpy.env.workspace = gzSupport.workspace
    uniqueValues = gzSupport.getFieldValues("Unique",fieldNames,datasets)[0]
    sources = gzSupport.listDatasets(gzSupport.workspace)
    sNames = sources[0]
    sFullNames = sources[1]
    arcpy.SetProgressor("Step","Load by " + str(fieldNames) + "...",0,len(uniqueValues)*len(datasets),1)
    for value in uniqueValues:
        try:
            hasVersion = False
            gzSupport.addMessage(value)
            if desc.workspaceType == "RemoteDatabase" and versionName != None:
                arcpy.SetProgressorLabel("Creating Version " + versionName)
                hasVersion = gzSupport.createVersion(defaultWorkspace,defaultVersionName,versionName)
            if hasVersion == True  or versionName == None or desc.workspaceType == "LocalDatabase":
                arcpy.env.workspace = targetWorkspace
                targets = gzSupport.listDatasets(targetWorkspace)
                tNames = targets[0]
                tFullNames = targets[1]
                for dataset in datasets:
                    name = dataset.getAttributeNode("name").nodeValue
                    arcpy.SetProgressorLabel("Loading Dataset " + name)
                    targetTable = gzSupport.getFullName(name,tNames,tFullNames)
                    sourceTable = gzSupport.getFullName(name,sNames,sFullNames)
                    attrs = [f.name for f in arcpy.ListFields(targetTable)]
                    expr = getExpression(attrs,fieldNames,value)
                    arcpy.SetProgressorLabel("Loading Dataset " + name + " Where " + expr)
                    tName = targetTable[targetTable.rfind("\\")+1:]
                    tLocation = targetTable[0:targetTable.rfind("\\")]
                    if gzSupport.deleteRows(tLocation,tName,expr) == True:
                        retVal = gzSupport.appendRows(sourceTable,targetTable,expr)
                        if retVal == False:
                            success = False
                    else:
                        success = False
                    arcpy.SetProgressorPosition()
                if success == True:
                    if desc.workspaceType == "RemoteDatabase":
                        arcpy.SetProgressorLabel("Reconcile and Post")
                        retVal = gzSupport.reconcilePost(defaultWorkspace,versionName,defaultVersionName)
                        if retVal == False:
                            success = False
                            gzSupport.deleteVersion(defaultWorkspace,versionName)
                    elif desc.workspaceType == "LocalDatabase":
                        arcpy.SetProgressorLabel("Completed Update for " + str(value))
                    gzSupport.logDatasetProcess(targetTable,sys.argv[0],retVal)
                else:
                    gzSupport.logDatasetProcess(targetTable,sys.argv[0],retVal)

        except:
            gzSupport.showTraceback()
            success = False
            gzSupport.logDatasetProcess("Serious error",sys.argv[0],False)
        finally:
            arcpy.SetProgressorPosition()
            arcpy.ClearWorkspaceCache_management(defaultWorkspace)
    if success == False:
        gzSupport.addError("Errors occurred during process, look in log files for more information")        
    if gzSupport.ignoreErrors == True:
        success = True
    if desc.workspaceType == "RemoteDatabase" and success == True:
        analyze(defaultWorkspace,datasets,tNames,tFullNames)
    arcpy.SetParameter(SUCCESS, success)

    arcpy.ClearWorkspaceCache_management(defaultWorkspace)
    gzSupport.compressGDB(gzSupport.workspace)
    gzSupport.compressGDB(defaultWorkspace)
    gzSupport.closeLog()
    return
Example #31
def writeDocument(sourceDataset,targetDataset,xmlFileName):
    desc = arcpy.Describe(sourceDataset)
    descT = arcpy.Describe(targetDataset)

    gzSupport.addMessage(sourceDataset)
    xmlDoc = Document()
    root = xmlDoc.createElement('Gizinta')
    xmlDoc.appendChild(root)
    root.setAttribute("logTableName",'gzLog')
    root.setAttribute("errorTableName",'gzError')
    root.setAttribute("version",'2013.1')
    root.setAttribute("xmlns:gizinta",'http://gizinta.com')

    extract = xmlDoc.createElement("Extract")
    root.appendChild(extract)

    dataElementName = getExtractElementName(desc,sourceDataset)

    source = xmlDoc.createElement(dataElementName)
    sourceName = getName(desc,sourceDataset)
    targetName = getName(descT,targetDataset)
    setDefaultProperties(source,dataElementName,sourceDataset,sourceName,targetName)
    where = xmlDoc.createElement("WhereClause")
    source.appendChild(where)
    extract.appendChild(source)

    transform = xmlDoc.createElement("Transform")
    root.appendChild(transform)

    dataset = xmlDoc.createElement("Dataset")
    transform.appendChild(dataset)
    dataset.setAttribute("name",targetName)
    dataset.setAttribute("qa","CheckFields,CheckGeometry")
    dataset.setAttribute("sourceIDField","")
    dataset.setAttribute("sourceNameField","")

    fields = getFields(descT,targetDataset)
    sourceFields = getFields(desc,sourceDataset)
    sourceNames = [field.name[field.name.rfind(".")+1:] for field in sourceFields]
    i=0
    try:
        for field in fields:
            fNode = xmlDoc.createElement("Field")
            dataset.appendChild(fNode)
            fieldName = field.name[field.name.rfind(".")+1:]
            if fieldName in sourceNames:
                addFieldElement(xmlDoc,fNode,"SourceName",fieldName)
            else:
                addFieldElement(xmlDoc,fNode,"SourceName","*"+fieldName+"*")

            addFieldElement(xmlDoc,fNode,"TargetName",fieldName)
            addFieldElement(xmlDoc,fNode,"Method","Copy")
            addFieldElement(xmlDoc,fNode,"FieldType",field.type)
            addFieldElement(xmlDoc,fNode,"FieldLength",str(field.length))
            i += 1
        setSourceFields(xmlDoc,dataset,sourceNames)
        # Should add a template section for value maps, maybe write domains...

        xmlStr = xmlDoc.toprettyxml()
        uglyXml = xmlDoc.toprettyxml(indent='\t')
        text_re = re.compile(r'>\n\s+([^<>\s].*?)\n\s+</', re.DOTALL)
        prettyXml = text_re.sub(r'>\g<1></', uglyXml)

        fHandle = open(xmlFileName, 'w')
        fHandle.write(prettyXml)
        fHandle.close()

    except:
        gzSupport.showTraceback()
        xmlStr =""
    return xmlStr
Example #32
def main(argv = None):
    # process one or more drawings
    global log, playlists_xml, playlists
    outputSuccess = True # default value, will be set to False if any processing errors returned
    doImports()
    processed = 0
    errorCount = 0
    cfgfile = fixServerConfigPath(gseData_xml)
    xmlDataDoc = xml.dom.minidom.parse(cfgfile)
    gseData = gseDataSettings(xmlDataDoc)
    gss = []
    for playlist in playlists_xml:
        filepath = fixConfigPath(playlist)
        playlists.append(filepath)
        xmlDoc = xml.dom.minidom.parse(filepath)
        gsClass = gseSettings(xmlDoc,gseData)
        gss.append(gsClass)
        
    tm = time.strftime("%Y%m%d%H%M%S")
    
    logFile = gss[0].logFileName.replace('.log','_' + tm + '.log')
    log = open(logFile,'w')
    autoSync = gss[0].autoSync
    exitOnError = gss[0].exitOnError
        
    try:
        totalTime = gzSupport.timer(0)
        inputFiles = gzSupport.getFileList(gss[0].cadFolder,gss[0].fileExt,gss[0].minTime)

        for fileFound in inputFiles:
            if errorCount > 0 and exitOnError == True:
                break
            folder = fileFound[0]
            dwg = fileFound[1]
            cadFile = os.path.join(folder,dwg)
            drawingTime = gzSupport.timer(0)
            pVal = 0 # counter for playlist looping
            partFailed = False
            if(dwg.find(gss[pVal].nameContains) > -1) and os.path.exists(cadFile):
                msg("\n" + dwg)
                for playlist in playlists: # Loop through the playlists and do the loading from CAD
                    if cont(errorCount,exitOnError,partFailed): # stop processing if any errors or continue if exit on error param is false
                        retVal = doLoad(playlist,folder,dwg,gss[pVal]) # Load the playlist using FME subprocess
                        if(retVal != True):
                            outputSuccess = False
                            errorCount += 1
                            gss[pVal].loaded = False
                            partFailed = True
                        else:
                            gss[pVal].loaded = True
                    pVal += 1
                if cont(errorCount,exitOnError,partFailed):
                    pVal = 0
                    if partFailed == False and autoSync == True: # Sync is param set and no errors have been returned
                        retVal = doSync(playlists,folder,dwg,gss[pVal]) # sync from Staging to Production
                        if(retVal != True):
                            outputSuccess = False
                            errorCount += 1
                        else:
                            for playlist in playlists: # go back through the playlists and Sync for this drawing
                                gss[pVal].syncd = True
                                pVal += 1
                loaded = False
                for gs in gss:
                    if (gs.loaded == True or gs.syncd == True) and dwg.find(gs.nameContains) > -1: # if any load or sync processing happened...
                        loaded = True
                if loaded == True:
                    msg(dwg + " total processing time: " + getTimeElapsed(drawingTime))
                    processed += 1
                    if gss[0].deleteCADFiles == True and partFailed == False:
                        try:
                            gzSupport.cleanupGarbage()
                            os.remove(cadFile)
                            try:
                                os.remove(cadFile[:len(cadFile)-4]+'.wld')
                            except:
                                pass
                            msg(cadFile + " deleted")
                        except:
                            msg("Unable to delete CAD file " + cadFile + "... continuing")
                if processed % 10 == 0:
                    msg("Processed " + str(processed))
                gzSupport.cleanupGarbage()
    except:
        errorCount += 1
        msg("A fatal error was encountered in gseLoaderFME.py")
        gzSupport.showTraceback()
        outputSuccess = False
        logProcess("gseLoaderFME","drawings",outputSuccess,gss[0].stagingWS)

    finally:
        arcpy.SetParameterAsText(successParam,outputSuccess)
        msg("\nTotal Number of Errors = " + str(errorCount))
        msg("outputSuccess set to: " + str(outputSuccess))
        msg(str(processed) + " drawings processed")
        msg("Total Processing time: " + getTimeElapsed(totalTime) + "\n")
        del gss, playlists
        log.close()
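cont(), doLoad() and doSync() are module helpers that are not included in this listing. The gate below is only a plausible reading of cont(), stated as an assumption rather than the project's actual implementation: keep going while nothing has failed for the current drawing and, when earlier errors exist, only if the exit-on-error parameter is off.

def contSketch(errorCount, exitOnError, partFailed):
    # assumption: a partial failure for the current drawing always stops it
    if partFailed:
        return False
    # assumption: earlier errors only stop the run when exitOnError is set
    if errorCount > 0 and exitOnError:
        return False
    return True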
Beispiel #33
0
def main(argv=None):
    success = True
    targetName = ''
    try:
        if not arcpy.Exists(gzSupport.workspace):
            gzSupport.addMessage(gzSupport.workspace +
                                 " does not exist, attempting to create")
            gzSupport.createGizintaGeodatabase()
        else:
            gzSupport.compressGDB(gzSupport.workspace)
        if len(datasets) > 0:
            progBar = len(datasets) + 1
            arcpy.SetProgressor("step", "Importing Datasets...", 0, progBar, 1)
            #gzSupport.deleteExistingRows(datasets)
            arcpy.SetProgressorPosition()
        for dataset in datasets:
            gzSupport.sourceIDField = dataset.getAttributeNode(
                "sourceIDField").nodeValue
            sourceName = dataset.getAttributeNode("sourceName").nodeValue
            targetName = dataset.getAttributeNode("targetName").nodeValue
            xmlFields = gzSupport.getXmlElements(gzSupport.xmlFileName,
                                                 "Field")
            arcpy.SetProgressorLabel("Loading " + sourceName + " to " +
                                     targetName + "...")
            if not arcpy.Exists(os.path.join(sourceWorkspace, sourceName)):
                gzSupport.addError(
                    os.path.join(sourceWorkspace, sourceName) +
                    " does not exist, exiting")
                return
            if not arcpy.Exists(os.path.join(gzSupport.workspace, targetName)):
                gzSupport.addMessage(
                    os.path.join(gzSupport.workspace, targetName) +
                    " does not exist")
            else:
                arcpy.Delete_management(
                    os.path.join(gzSupport.workspace, targetName))

            arcpy.env.Workspace = gzSupport.workspace
            try:
                retVal = gzSupport.exportDataset(sourceWorkspace, sourceName,
                                                 targetName, dataset,
                                                 xmlFields)
                if retVal == False:
                    success = False
            except:
                gzSupport.showTraceback()
                success = False
                retVal = False
            gzSupport.logDatasetProcess(sourceName, targetName, retVal)
        arcpy.SetProgressorPosition()
    except:
        gzSupport.showTraceback()
        gzSupport.addError("A Fatal Error occurred")
        success = False
        gzSupport.logDatasetProcess("extractWorkspaceToGDB", targetName, False)
    finally:
        arcpy.ResetProgressor()
        arcpy.RefreshCatalog(gzSupport.workspace)
        try:
            arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
            arcpy.ClearWorkspaceCache_management(sourceWorkspace)
        except:
            gzSupport.addMessage("Unable to clear workspace cache, continuing")

    if success == False:
        gzSupport.addError(
            "Errors occurred during process, look in log files for more information"
        )
    if gzSupport.ignoreErrors == True:
        success = True

    gzSupport.closeLog()
    arcpy.SetParameter(SUCCESS, success)
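The loop above reads sourceName, targetName and sourceIDField attributes from each dataset node. Below is a minimal sketch of the kind of configuration XML such a loop could consume; the element names and layout are assumptions based only on the attributes referenced above, not the actual gizinta schema.

import xml.dom.minidom

sample = """<Gizinta>
  <Dataset sourceName="SRC.Parcels" targetName="Parcels" sourceIDField="PARCEL_ID"/>
</Gizinta>"""
doc = xml.dom.minidom.parseString(sample)
for node in doc.getElementsByTagName("Dataset"):
    print(node.getAttributeNode("sourceName").nodeValue,
          node.getAttributeNode("targetName").nodeValue,
          node.getAttributeNode("sourceIDField").nodeValue)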
Beispiel #34
0
def writeDocument(sourceDataset, targetDataset, xmlFileName):
    desc = arcpy.Describe(sourceDataset)
    descT = arcpy.Describe(targetDataset)

    gzSupport.addMessage(sourceDataset)
    xmlDoc = Document()
    root = xmlDoc.createElement('Gizinta')
    xmlDoc.appendChild(root)
    root.setAttribute("logTableName", 'gzLog')
    root.setAttribute("errorTableName", 'gzError')
    root.setAttribute("version", '2013.1')
    root.setAttribute("xmlns:gizinta", 'http://gizinta.com')

    extract = xmlDoc.createElement("Extract")
    root.appendChild(extract)

    dataElementName = getExtractElementName(desc, sourceDataset)

    source = xmlDoc.createElement(dataElementName)
    sourceName = getName(desc, sourceDataset)
    targetName = getName(descT, targetDataset)
    setDefaultProperties(source, dataElementName, sourceDataset, sourceName,
                         targetName)
    where = xmlDoc.createElement("WhereClause")
    source.appendChild(where)
    extract.appendChild(source)

    transform = xmlDoc.createElement("Transform")
    root.appendChild(transform)

    dataset = xmlDoc.createElement("Dataset")
    transform.appendChild(dataset)
    dataset.setAttribute("name", targetName)
    dataset.setAttribute("qa", "CheckFields,CheckGeometry")
    dataset.setAttribute("sourceIDField", "")
    dataset.setAttribute("sourceNameField", "")

    fields = getFields(descT, targetDataset)
    sourceFields = getFields(desc, sourceDataset)
    sourceNames = [
        field.name[field.name.rfind(".") + 1:] for field in sourceFields
    ]
    i = 0
    try:
        for field in fields:
            fNode = xmlDoc.createElement("Field")
            dataset.appendChild(fNode)
            fieldName = field.name[field.name.rfind(".") + 1:]
            if fieldName in sourceNames:
                addFieldElement(xmlDoc, fNode, "SourceName", fieldName)
            else:
                addFieldElement(xmlDoc, fNode, "SourceName",
                                "*" + fieldName + "*")

            addFieldElement(xmlDoc, fNode, "TargetName", fieldName)
            addFieldElement(xmlDoc, fNode, "Method", "Copy")
            addFieldElement(xmlDoc, fNode, "FieldType", field.type)
            addFieldElement(xmlDoc, fNode, "FieldLength", str(field.length))
            i += 1
        setSourceFields(xmlDoc, dataset, sourceNames)
        # Should add a template section for value maps, maybe write domains...

        xmlStr = xmlDoc.toprettyxml()
        uglyXml = xmlDoc.toprettyxml(indent='	', encoding="utf-8")
        text_re = re.compile('>\n\s+([^<>\s].*?)\n\s+</', re.DOTALL)
        prettyXml = text_re.sub('>\g<1></', uglyXml)

        fHandle = open(xmlFileName, 'w')
        fHandle.write(prettyXml)
        fHandle.close()

    except:
        gzSupport.showTraceback()
        xmlStr = ""
    return xmlStr
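A hypothetical call to the writeDocument function above; the geodatabase paths are placeholders, and arcpy plus the helper functions from this example must be available.

sourceFC = r"C:\Data\Source.gdb\Parcels"   # placeholder path
targetFC = r"C:\Data\Target.gdb\Parcels"   # placeholder path
xmlStr = writeDocument(sourceFC, targetFC, r"C:\Data\Parcels.xml")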
Beispiel #35
0
def main(argv=None):
    # main function - list the source and target datasets, then create a project file where there is a match on non-prefixed name
    dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
    logname = os.path.join(outputFolder, 'gzCreateProjectFiles.log')
    gzSupport.startLog()

    success = True
    try:

        gzSupport.addMessage("Getting list of datasets for Target " +
                             targetGDB)
        targets = gzSupport.listDatasets(targetGDB)
        tNames = targets[0]
        tFullNames = targets[1]

        gzSupport.addMessage("Getting list of datasets for Source " +
                             sourceGDB)
        sources = gzSupport.listDatasets(sourceGDB)
        sNames = sources[0]
        sFullNames = sources[1]

        t = 0
        arcpy.SetProgressor("Step", "Creating Files...", 0, len(tNames), 1)

        for name in tNames:
            arcpy.SetProgressorPosition(t)
            arcpy.SetProgressorLabel("Creating file for " + name + "...")
            # for each target name
            if debug:
                gzSupport.addMessage(name)
            try:
                # look for the matching name in source names
                s = sNames.index(name)
            except:
                # will get here if no match
                s = -1
            if s > -1:
                # create file if there is a match
                fileName = outputFolder + os.sep + prefixStr + name.title() + ".xml"
                if os.path.exists(fileName):
                    os.remove(fileName)
                try:
                    #arcpy.AddToolbox(os.path.join(dir,"Gizinta.tbx"))
                    #arcpy.gzCreateProject_gizinta(sFullNames[s],tFullNames[t],fileName) # this doesn't always work...
                    gzCreateProject.createGzFile(sFullNames[s], tFullNames[t],
                                                 fileName)
                    retVal = True
                    gzSupport.addMessage("Created " + fileName)
                except:
                    retVal = False
                if retVal == False:
                    gzSupport.addMessage("Failed to create file for " + name)
                    gzSupport.showTraceback()
                    success = False
            else:
                gzSupport.addMessage("Skipping " + name)
            t = t + 1
    except:
        gzSupport.showTraceback()
        arcpy.AddError("Error creating project files")
        success = False

    finally:
        arcpy.ResetProgressor()
        arcpy.SetParameter(gzSupport.successParameterNumber, success)
        arcpy.env.workspace = targetGDB
        arcpy.RefreshCatalog(outputFolder)
        gzSupport.closeLog()
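gzSupport.listDatasets is called here (and in earlier examples) but not shown; the callers treat its result as a pair of base-name and full-path lists. The sketch below shows one way such a helper could be written with the standard arcpy list functions - an assumption, not the gzSupport implementation, and it ignores feature datasets.

import os
import arcpy

def listDatasetsSketch(gdb):
    arcpy.env.workspace = gdb
    names = []
    fullNames = []
    for item in (arcpy.ListFeatureClasses() or []) + (arcpy.ListTables() or []):
        names.append(item[item.rfind(".") + 1:])   # strip any owner/schema prefix
        fullNames.append(os.path.join(gdb, item))
    return names, fullNames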
Beispiel #36
0
                    if sourceTest == '':
                        sourceTest = None
                if mapExpr and mapExpr != "":
                    currentValue = calcValue(row,attrs,mapExpr)
                if currentValue == sourceTest or currentValue == sourceValue: # this will check numeric and non-numeric equivalency for current values in value maps
                    found = True
                    try:
                        idx = sourceValues.index(sourceValue)
                        newValue = targetValues[idx]
                        row.setValue(targetName,newValue)
                    except:
                        errCount += 1
                        row.setValue(targetName,currentValue)
                        success = False
                        err = "Unable to map values for " + targetName + ", value = " + str(newValue)
                        gzSupport.showTraceback()
                        gzSupport.addError(err)
                        gzSupport.logProcessError(row.getValue(gzSupport.sourceNameField),gzSupport.sourceIDField,row.getValue(gzSupport.sourceIDField),targetName,err)

if not found:
    if otherwise and str(otherwise) != "None":
        otherwise = str(otherwise)
        if otherwise.count(" ") > 2 or otherwise.count("!") > 1:
            otherwise = calcValue(row,attrs,otherwise)
            #gzSupport.addMessage(otherwise)
        row.setValue(targetName,otherwise)
    else:
        errCount += 1
        success = False
        err = "Unable to find map value (otherwise) for " + str(targetName) + ", value = " + str(currentValue)
        gzSupport.addError(err)
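The fragment above comes from the middle of a value-map routine. The stand-alone function below restates the same idea in a compact form for clarity - it is illustrative only, not the project's code: translate a value through parallel source/target lists and fall back to an "otherwise" value when nothing matches.

def mapValueSketch(currentValue, sourceValues, targetValues, otherwise=None):
    for idx, sourceValue in enumerate(sourceValues):
        if currentValue == sourceValue:
            return targetValues[idx]
    # no match: use the configured fallback, or keep the current value
    return otherwise if otherwise is not None else currentValue

# e.g. mapValueSketch("A", ["A", "B"], ["Alpha", "Beta"]) returns "Alpha"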
Beispiel #37
0
def main(argv=None):
    success = True
    if not arcpy.Exists(gzSupport.workspace):
        gzSupport.addMessage(gzSupport.workspace + " does not exist, attempting to create")
        gzSupport.createGizintaGeodatabase()
    else:
        gzSupport.compressGDB(gzSupport.workspace)
    arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    try:
        gzSupport.addMessage("Looking for drawings modified since " + since)
        minTime = datetime.datetime.strptime(since, "%d/%m/%Y %I:%M:%S %p")
        cadFiles = gzSupport.getFileList(cadFolder, cadExt, minTime)
        if len(cadFiles) > 0:
            progBar = len(cadFiles) + 1
            arcpy.SetProgressor("step", "Importing Drawings...", 0, progBar, 1)
            arcpy.SetProgressorPosition()
            gzSupport.deleteExistingRows(datasets)
        for item in cadFiles:
            cadPath = item[0]
            cadName = item[1]
            gzSupport.addMessage("Importing Drawing " + cadName)

            for dataset in datasets:
                try:
                    name = dataset.getAttributeNode("sourceName").nodeValue
                except:
                    name = dataset.getAttributeNode("name").nodeValue

                gzSupport.sourceIDField = dataset.getAttributeNode("sourceIDField").nodeValue
                xmlFields = gzSupport.getXmlElements(gzSupport.xmlFileName, "Field")
                arcpy.SetProgressorLabel("Loading " + name + " for " + cadName + "...")
                arcpy.env.Workspace = gzSupport.workspace
                targetName = dataset.getAttributeNode("targetName").nodeValue
                sourceWorkspace = os.path.join(cadPath, cadName)
                exists = False
                if not arcpy.Exists(os.path.join(gzSupport.workspace, targetName)):
                    gzSupport.addMessage(os.path.join(gzSupport.workspace, targetName) + " does not exist")
                else:
                    exists = True
                    # arcpy.Delete_management(os.path.join(gzSupport.workspace,targetName))

                try:
                    if not exists == True:
                        retVal = gzSupport.exportDataset(sourceWorkspace, name, targetName, dataset, xmlFields)
                        addDrawingField(os.path.join(gzSupport.workspace, targetName), cadName)
                    else:
                        retVal = importLayer(cadPath, cadName, dataset)
                        addDrawingField(os.path.join(gzSupport.workspace, targetName), cadName)
                    if retVal == False:
                        success = False
                except:
                    gzSupport.showTraceback()
                    success = False
                    retVal = False

                arcpy.env.Workspace = gzSupport.workspace
                gzSupport.logDatasetProcess(cadName, name, retVal)
                gzSupport.cleanupGarbage()
            arcpy.SetProgressorPosition()
    except:
        gzSupport.addError("A Fatal Error occurred")
        gzSupport.showTraceback()
        success = False
        gzSupport.logDatasetProcess("", "", False)
    finally:
        arcpy.ResetProgressor()
        arcpy.RefreshCatalog(gzSupport.workspace)
        arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
        gzSupport.cleanupGarbage()

    if success == False:
        gzSupport.addError("Errors occurred during process, look in log files for more information")
    if gzSupport.ignoreErrors == True:
        success = True
    gzSupport.closeLog()
    arcpy.SetParameter(SUCCESS, success)
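addDrawingField and importLayer are project helpers that are not part of this snippet. The sketch below is only an assumption about what addDrawingField might do: add a text field (the field name here is hypothetical) and stamp every imported row with the source drawing name.

import arcpy

def addDrawingFieldSketch(table, cadName, fieldName="DrawingName"):
    # add the field once, then write the drawing name into every row
    if fieldName not in [f.name for f in arcpy.ListFields(table)]:
        arcpy.AddField_management(table, fieldName, "TEXT", field_length=255)
    arcpy.CalculateField_management(table, fieldName, "'" + cadName + "'", "PYTHON_9.3")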
Beispiel #38
0
def main(argv=None):
    global targetWorkspace
    hasVersion = False
    desc = arcpy.Describe(gzSupport.workspace)
    if desc.workspaceType != "RemoteDatabase" and versionName == None:
        targetWorkspace = defaultWorkspace
    success = True
    arcpy.ResetProgressor()
    arcpy.env.Workspace = gzSupport.workspace
    uniqueValues = gzSupport.getFieldValues("Unique", fieldNames, datasets)[0]
    sources = gzSupport.listDatasets(gzSupport.workspace)
    sNames = sources[0]
    sFullNames = sources[1]
    arcpy.SetProgressor("Step", "Load by " + str(fieldNames) + "...", 0, len(uniqueValues) * len(datasets), 1)
    for value in uniqueValues:
        try:
            hasVersion = False
            gzSupport.addMessage(value)
            if desc.workspaceType == "RemoteDatabase" and versionName != None:
                arcpy.SetProgressorLabel("Creating Version " + versionName)
                hasVersion = gzSupport.createVersion(defaultWorkspace, defaultVersionName, versionName)
            if hasVersion == True or versionName == None or desc.workspaceType == "LocalDatabase":
                arcpy.env.Workspace = targetWorkspace
                targets = gzSupport.listDatasets(targetWorkspace)
                tNames = targets[0]
                tFullNames = targets[1]
                for dataset in datasets:
                    name = dataset.getAttributeNode("name").nodeValue
                    arcpy.SetProgressorLabel("Loading Dataset " + name)
                    targetTable = gzSupport.getFullName(name, tNames, tFullNames)
                    sourceTable = gzSupport.getFullName(name, sNames, sFullNames)
                    attrs = [f.name for f in arcpy.ListFields(targetTable)]
                    expr = getExpression(attrs, fieldNames, value)
                    arcpy.SetProgressorLabel("Loading Dataset " + name + " Where " + expr)
                    tName = targetTable[targetTable.rfind("\\") + 1 :]
                    tLocation = targetTable[0 : targetTable.rfind("\\")]
                    if gzSupport.deleteRows(tLocation, tName, expr) == True:
                        retVal = gzSupport.appendRows(sourceTable, targetTable, expr)
                        if retVal == False:
                            success = False
                    else:
                        success = False
                    arcpy.SetProgressorPosition()
                if success == True:
                    if desc.workspaceType == "RemoteDatabase":
                        arcpy.SetProgressorLabel("Reconcile and Post")
                        retVal = gzSupport.reconcilePost(defaultWorkspace, versionName, defaultVersionName)
                        if retVal == False:
                            success = False
                            gzSupport.deleteVersion(defaultWorkspace, versionName)
                    elif desc.workspaceType == "LocalDatabase":
                        arcpy.SetProgressorLabel("Completed Update for " + str(value))
                    gzSupport.logDatasetProcess(targetTable, sys.argv[0], retVal)
                else:
                    gzSupport.logDatasetProcess(targetTable, sys.argv[0], retVal)
                gzSupport.cleanupGarbage()

        except:
            gzSupport.showTraceback()
            success = False
            gzSupport.logDatasetProcess("Serious error", sys.argv[0], False)
        finally:
            arcpy.SetProgressorPosition()
            arcpy.ClearWorkspaceCache_management(defaultWorkspace)
    if success == False:
        gzSupport.addError("Errors occurred during process, look in log files for more information")
    if gzSupport.ignoreErrors == True:
        success = True
    if desc.workspaceType == "RemoteDatabase" and success == True:
        analyze(defaultWorkspace, datasets, tNames, tFullNames)
    arcpy.SetParameter(SUCCESS, success)

    arcpy.ClearWorkspaceCache_management(defaultWorkspace)
    gzSupport.compressGDB(gzSupport.workspace)
    gzSupport.compressGDB(defaultWorkspace)
    gzSupport.closeLog()
    return
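getExpression is referenced above but not shown; it evidently turns the configured field names and the current unique value into a where clause. The sketch below is a guess at that behaviour, not the actual helper: pick the first configured field present in the target table and quote the value when it is a string.

def getExpressionSketch(attrs, fieldNames, value):
    field = next(f for f in fieldNames if f in attrs)
    if isinstance(value, str):
        return field + " = '" + value + "'"
    return field + " = " + str(value)

# e.g. getExpressionSketch(["MAPSHEET", "OID"], ["MAPSHEET"], "A1") returns "MAPSHEET = 'A1'"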
Beispiel #39
0
def main(argv=None):
    success = True
    if not arcpy.Exists(gzSupport.workspace):
        gzSupport.addMessage(gzSupport.workspace +
                             " does not exist, attempting to create")
        gzSupport.createGizintaGeodatabase()
    else:
        gzSupport.compressGDB(gzSupport.workspace)
    arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    try:
        gzSupport.addMessage("Looking for drawings modified since " + since)
        minTime = dt.datetime.strptime(since, "%d/%m/%Y %I:%M:%S %p")
        cadFiles = getFileList(cadFolder, cadExt, minTime)
        if len(cadFiles) > 0:
            progBar = len(cadFiles) + 1
            arcpy.SetProgressor("step", "Importing Drawings...", 0, progBar, 1)
            deleteExistingRows(datasets)
            arcpy.SetProgressorPosition()
        for item in cadFiles:
            cadPath = item[0]
            cadName = item[1]
            gzSupport.addMessage("Importing Drawing " + cadName)

            for dataset in datasets:
                try:
                    name = dataset.getAttributeNode("sourceName").nodeValue
                except:
                    name = dataset.getAttributeNode("name").nodeValue

                gzSupport.sourceIDField = dataset.getAttributeNode(
                    "sourceIDField").nodeValue
                arcpy.SetProgressorLabel("Loading " + name + " for " +
                                         cadName + "...")
                arcpy.env.Workspace = gzSupport.workspace
                targetName = dataset.getAttributeNode("targetName").nodeValue
                sourceWorkspace = os.path.join(cadPath, cadName)
                if not arcpy.Exists(
                        os.path.join(gzSupport.workspace, targetName)):
                    gzSupport.addMessage(
                        os.path.join(gzSupport.workspace, targetName) +
                        " does not exist")
                    mode = "export"
                else:
                    mode = "import"

                try:
                    if mode == "import":
                        retVal = gzSupport.importDataset(
                            sourceWorkspace, name, targetName, dataset)
                    elif mode == "export":
                        retVal = gzSupport.exportDataset(
                            sourceWorkspace, name, targetName, dataset)
                    #retVal = importLayer(cadPath,cadName,dataset)
                    if retVal == False:
                        success = False
                except:
                    gzSupport.showTraceback()
                    success = False
                    retVal = False

                arcpy.env.Workspace = gzSupport.workspace
                gzSupport.logDatasetProcess(cadName, name, retVal)
                gzSupport.cleanupGarbage()
            arcpy.SetProgressorPosition()
    except:
        gzSupport.addError("A Fatal Error occurred")
        gzSupport.showTraceback()
        success = False
        gzSupport.logDatasetProcess("", "", False)
    finally:
        arcpy.ResetProgressor()
        arcpy.RefreshCatalog(gzSupport.workspace)
        arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
        gzSupport.cleanupGarbage()

    if success == False:
        gzSupport.addError(
            "Errors occurred during process, look in log files for more information"
        )
    if gzSupport.ignoreErrors == True:
        success = True
    gzSupport.closeLog()
    arcpy.SetParameter(SUCCESS, success)
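getFileList (and gzSupport.getFileList in the earlier examples) returns the drawings to process as (folder, filename) pairs filtered by extension and modification time. The helper below is a comparable sketch under those assumptions, not the project's implementation.

import datetime
import os

def getFileListSketch(folder, ext, minTime):
    found = []
    for root, dirs, files in os.walk(folder):
        for name in files:
            if name.lower().endswith(ext.lower()):
                mtime = datetime.datetime.fromtimestamp(
                    os.path.getmtime(os.path.join(root, name)))
                if mtime > minTime:
                    found.append((root, name))
    return found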
Beispiel #40
0
                                            row, attrs, mapExpr)
                                    if currentValue == sourceTest or currentValue == sourceValue:  # this will check numeric and non-numeric equivalency for current values in value maps
                                        found = True
                                        try:
                                            idx = sourceValues.index(
                                                sourceValue)
                                            newValue = targetValues[idx]
                                            row.setValue(targetName, newValue)
                                        except:
                                            errCount += 1
                                            row.setValue(
                                                targetName, currentValue)
                                            success = False
                                            err = "Unable to map values for " + targetName + ", value = " + str(
                                                newValue)
                                            gzSupport.showTraceback()
                                            gzSupport.addError(err)
                                            gzSupport.logProcessError(
                                                row.getValue(
                                                    gzSupport.sourceNameField),
                                                gzSupport.sourceIDField,
                                                row.getValue(
                                                    gzSupport.sourceIDField),
                                                targetName, err)

                    if not found:
                        if otherwise and str(otherwise) != "None":
                            otherwise = str(otherwise)
                            if otherwise.count(" ") > 2 or otherwise.count(
                                    "!") > 1:
                                otherwise = calcValue(row, attrs, otherwise)