Example No. 1
def main(argv=None):
    success = True
    gzSupport.compressGDB(gzSupport.workspace)
    arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    tables = gzSupport.listDatasets(gzSupport.workspace)
    tNames = tables[0]
    tFullNames = tables[1]
    name = ''

    for dataset in datasets:
        arcpy.env.workspace = gzSupport.workspace
        name = dataset.getAttributeNode("name").nodeValue
        table = gzSupport.getFullName(name, tNames, tFullNames)
        gzSupport.sourceIDField = dataset.getAttributeNode(
            "sourceIDField").nodeValue
        gzSupport.sourceNameField = dataset.getAttributeNode(
            "sourceNameField").nodeValue
        if not arcpy.Exists(table):
            gzSupport.addError("Feature Class " + table +
                               " does not exist, exiting")
            arcpy.SetParameter(SUCCESS, False)
            return
        if not arcpy.TestSchemaLock(table):
            gzSupport.addError("Unable to obtain a schema lock for " + table +
                               ", exiting")
            arcpy.SetParameter(SUCCESS, False)
            return -1
        desc = arcpy.Describe(table)
        fields = dataset.getElementsByTagName("Field")
        try:
            attrs = [f.name for f in arcpy.ListFields(table)]
            for field in fields:
                arcpy.env.workspace = gzSupport.workspace
                targetName = gzSupport.getNodeValue(field, "TargetName")
                gzSupport.addGizintaField(table, targetName, field, attrs)

            retVal = setFieldValues(table, fields)
            if retVal == False:
                success = False
            gzSupport.logDatasetProcess(name, "Fields", retVal)
            arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
            gzSupport.cleanupGarbage()

        except:
            gzSupport.showTraceback()
            success = False
            gzSupport.logDatasetProcess("fieldCalculator", name, False)
        finally:
            arcpy.RefreshCatalog(table)
            arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    if success == False:
        gzSupport.addError(
            "Errors occurred during process, look in log file tools\\log\\fieldCalculator.log for more information"
        )
    if gzSupport.ignoreErrors == True:
        success = True
    arcpy.SetParameter(SUCCESS, success)
    arcpy.ResetProgressor()
    gzSupport.closeLog()
    return
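
This main() and the similar ones below rely on module-level globals that are set up outside the function: the gzSupport support module, the datasets list parsed from a gizinta XML configuration, and a SUCCESS index for the tool's boolean output parameter. A minimal sketch of that setup, with the parameter order, the SUCCESS value, and the "Dataset" element name all assumed rather than taken from the source:

import arcpy
import gzSupport  # gizinta support module, assumed to be importable

SUCCESS = 2  # index of the boolean output parameter (assumed)

gzSupport.xmlFileName = arcpy.GetParameterAsText(0)  # gizinta XML config (assumed order)
gzSupport.workspace = arcpy.GetParameterAsText(1)    # staging geodatabase (assumed order)
datasets = gzSupport.getXmlElements(gzSupport.xmlFileName, "Dataset")  # element name assumed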
Example No. 2
def importLayer(cadPath, cadName, dataset):
    result = False
    try:
        name = dataset.getAttributeNode("targetName").nodeValue
    except:
        name = dataset.getAttributeNode("name").nodeValue

    table = os.path.join(gzSupport.workspace, name)
    layerName = dataset.getAttributeNode("sourceName").nodeValue
    layer = os.path.join(cadPath, cadName, layerName)
    gzSupport.addMessage("Importing Layer " + layer)

    try:
        whereClause = gzSupport.getNodeValue(dataset, "WhereClause")
        xmlFields = dataset.getElementsByTagName("Field")
        gzSupport.addMessage("Where " + whereClause)
        if not arcpy.Exists(table):
            err = "Feature Class " + name + " does not exist"
            gzSupport.addError(err)
            gzSupport.logProcessError(cadName, gzSupport.sourceIDField, name,
                                      name, err)
            return False
        if whereClause != '':
            view = gzSupport.makeFeatureView(gzSupport.workspace, layer,
                                             layerName + "_View", whereClause,
                                             xmlFields)
        else:
            view = layer
        count = int(arcpy.GetCount_management(view).getOutput(0))  # getOutput() returns a string; cast for the numeric check below
        gzSupport.addMessage(str(count) + " source Features for " + name)

        if hasJoinTo(dataset) == True:
            res = joinToCsv(view, dataset, cadPath, cadName)
            result = res[0]
            view = res[1]
        else:
            view = view
            result = True

        if result == True and count > 0:
            arcpy.Append_management([view], table, "NO_TEST", "", "")
            arcpy.ClearWorkspaceCache_management(gzSupport.workspace)

    except:
        err = "Failed to import layer " + name
        gzSupport.addError(err)
        gzSupport.showTraceback()
        gzSupport.logProcessError(cadName, gzSupport.sourceIDField, name,
                                  layerName, err)
    gzSupport.cleanupGarbage()
    try:
        del view
    except:
        gzSupport.addMessage("")
    return result
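
importLayer expects the CAD folder, the drawing name, and a dataset node from the XML configuration. A minimal call sketch; the folder and drawing name are hypothetical, and `datasets` is the global set up as in the main() snippets:

cadPath = r"C:\data\cad"     # hypothetical folder
cadName = "site_plan.dwg"    # hypothetical drawing
for dataset in datasets:     # dataset nodes from the gizinta XML config
    if not importLayer(cadPath, cadName, dataset):
        gzSupport.addMessage("Import failed for " + cadName)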
Example No. 3
def main(argv = None):
    success = True
    gzSupport.compressGDB(gzSupport.workspace)
    arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    tables = gzSupport.listDatasets(gzSupport.workspace)
    tNames = tables[0]
    tFullNames = tables[1]
    name = ''
    
    for dataset in datasets:
        arcpy.env.workspace = gzSupport.workspace
        name = dataset.getAttributeNode("name").nodeValue
        table = gzSupport.getFullName(name,tNames,tFullNames)
        gzSupport.sourceIDField = dataset.getAttributeNode("sourceIDField").nodeValue
        gzSupport.sourceNameField = dataset.getAttributeNode("sourceNameField").nodeValue
        if not arcpy.Exists(table):
            gzSupport.addError("Feature Class " + table + " does not exist, exiting")
            arcpy.SetParameter(SUCCESS, False)
            return
        if not arcpy.TestSchemaLock(table):
            gzSupport.addError("Unable to obtain a schema lock for " + table + ", exiting")
            arcpy.SetParameter(SUCCESS, False)
            return -1
        desc = arcpy.Describe(table)
        fields = dataset.getElementsByTagName("Field")
        try:
            attrs = [f.name for f in arcpy.ListFields(table)]
            for field in fields:
                arcpy.env.workspace = gzSupport.workspace
                targetName = gzSupport.getNodeValue(field,"TargetName")
                gzSupport.addGizintaField(table,targetName,field,attrs)

            retVal = setFieldValues(table,fields)
            if retVal == False:
                success = False
            gzSupport.logDatasetProcess(name,"Fields",retVal)
            arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
            gzSupport.cleanupGarbage()

        except:
            gzSupport.showTraceback()
            success = False
            gzSupport.logDatasetProcess("fieldCalculator",name,False)
        finally:
            arcpy.RefreshCatalog(table)
            arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    if success == False:
        gzSupport.addError("Errors occurred during process, look in log file tools\\log\\fieldCalculator.log for more information")
    if gzSupport.ignoreErrors == True:
        success = True
    arcpy.SetParameter(SUCCESS, success)
    arcpy.ResetProgressor()
    gzSupport.closeLog()
    return
Example No. 4
def importLayer(cadPath, cadName, dataset):
    result = False
    try:
        name = dataset.getAttributeNode("targetName").nodeValue
    except:
        name = dataset.getAttributeNode("name").nodeValue

    table = os.path.join(gzSupport.workspace, name)
    layerName = dataset.getAttributeNode("sourceName").nodeValue
    layer = os.path.join(cadPath, cadName, layerName)
    gzSupport.addMessage("Importing Layer " + layer)

    try:
        whereClause = gzSupport.getNodeValue(dataset, "WhereClause")
        xmlFields = dataset.getElementsByTagName("Field")
        gzSupport.addMessage("Where " + whereClause)
        if not arcpy.Exists(table):
            err = "Feature Class " + name + " does not exist"
            gzSupport.addError(err)
            gzSupport.logProcessError(cadName, gzSupport.sourceIDField, name, name, err)
            return False
        if whereClause != "":
            view = gzSupport.makeFeatureView(gzSupport.workspace, layer, layerName + "_View", whereClause, xmlFields)
        else:
            view = layer
        count = int(arcpy.GetCount_management(view).getOutput(0))  # getOutput() returns a string; cast for the numeric check below
        gzSupport.addMessage(str(count) + " source Features for " + name)

        if hasJoinTo(dataset) == True:
            res = joinToCsv(view, dataset, cadPath, cadName)
            result = res[0]
            view = res[1]
        else:
            view = view
            result = True

        if result == True and count > 0:
            arcpy.Append_management([view], table, "NO_TEST", "", "")
            arcpy.ClearWorkspaceCache_management(gzSupport.workspace)

    except:
        err = "Failed to import layer " + name
        gzSupport.addError(err)
        gzSupport.showTraceback()
        gzSupport.logProcessError(cadName, gzSupport.sourceIDField, name, layerName, err)
    gzSupport.cleanupGarbage()
    try:
        del view
    except:
        gzSupport.addMessage("")
    return result
Example No. 5
def doAppend(source,target):
    # perform the append from a source table to a target table
    success = False
    if arcpy.Exists(target):
        gzSupport.addMessage("Appending " + source + " |TO| " + target)
        arcpy.Append_management(source,target, "NO_TEST")
        success = True
        if debug:
            gzSupport.addMessage("completed")
    else:
        gzSupport.addMessage("Target: " + target + " does not exist")

    gzSupport.cleanupGarbage()
    return success
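
A minimal usage sketch for doAppend, appending a staged feature class into its production counterpart; both geodatabase paths and the dataset name are hypothetical, and gzSupport is assumed to be imported as in the snippets above:

import os

sourceGDB = r"C:\data\staging.gdb"     # assumed path
targetGDB = r"C:\data\production.gdb"  # assumed path

ok = doAppend(os.path.join(sourceGDB, "Parcels"), os.path.join(targetGDB, "Parcels"))
if not ok:
    gzSupport.addMessage("Append skipped because the target does not exist")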
Example No. 6
def main(argv = None):
    # main function - list the source and target datasets, then delete rows/append where there is a match on non-prefixed name
    success = True
    name = ''
    try:
        if len(datasetNames) == 0:
            sources = gzSupport.listDatasets(sourceGDB)
            sNames = sources[0]
            sFullNames = sources[1]
            targets = gzSupport.listDatasets(targetGDB)
            tNames = targets[0]
            tFullNames = targets[1]
        else:
            sNames = datasetNames
        s = 0
        arcpy.SetProgressor("Step","Replacing rows...",0,len(sNames),1)
        for name in sNames:
            arcpy.SetProgressorPosition(s)
            arcpy.SetProgressorLabel(" Replacing rows using " + name + "...")
            # for each source name
            if debug:
                gzSupport.addMessage(name)
            target = os.path.join(targetGDB,name)
            if arcpy.Exists(target):
                # append if there is a match
                if len(datasetNames) == 0 or gzSupport.nameTrimmer(name) in datasetNames:
                    retVal = doInlineAppend(os.path.join(sourceGDB,name),target)
                    gzSupport.logDatasetProcess("replaceRows",name,retVal)
                    if retVal == False:
                        success = False
                    gzSupport.cleanupGarbage()
                else:
                    gzSupport.addMessage("Skipping "  + gzSupport.nameTrimmer(name))
            s = s + 1
    except:
        gzSupport.showTraceback()
        arcpy.AddError("Unable to update datasets")
        success = False
        gzSupport.logDatasetProcess("replaceRows",name,success)

    finally:
        arcpy.ResetProgressor()
        arcpy.SetParameter(SUCCESS, success)
        arcpy.ClearWorkspaceCache_management(targetGDB)
        gzSupport.compressGDB(targetGDB)
        gzSupport.closeLog()
Example No. 7
def doTruncate(target):
    # delete all rows in the target table
    success = False
    try:
        if arcpy.Exists(target):
            gzSupport.addMessage("Deleting rows in " + target)
            arcpy.DeleteRows_management(target)
            success = True
            if debug:
                gzSupport.addMessage("Deleted")
        else:
            gzSupport.addMessage("Target: " + target + " does not exist")
        gzSupport.cleanupGarbage()
    except:
        gzSupport.addMessage("Unable to delete rows for: " + target )
        # assume this is a view or something else whose rows cannot be deleted
        
    return success
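
doTruncate pairs naturally with an append helper to replace the contents of a target table, mirroring the delete-then-append flow in Example No. 6. A sketch of that pattern, assuming sourceGDB and targetGDB are configured elsewhere and reusing the doAppend helper from Example No. 5; the dataset name is illustrative:

import os

name = "Parcels"  # hypothetical dataset name
source = os.path.join(sourceGDB, name)
target = os.path.join(targetGDB, name)

if doTruncate(target):
    replaced = doAppend(source, target)
    gzSupport.logDatasetProcess("replaceRows", name, replaced)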
Example No. 8
def doSync(playlists,folder,dwg,gs):
    # Sync process drawing
    global log
    inputDrawing = os.path.join(folder,dwg)
    drawingTime = gzSupport.timer(0)
    msg("Sync changes to database for " + dwg)
    
    # sync changes
    try:
        #result = arcpy.gseSyncChanges_gse(inputDrawing," ".join(playlists),gs.stagingWS,gs.productionWS)
        # the arcpy approach caused issues so a direct sync call has turned out to be more reliable...
        retVal = gseSyncChanges.sync(inputDrawing," ".join(playlists),gs.stagingWS,gs.productionWS,log)
    except:
        msg("Error encountered, sync failed...")
        retVal = False
    logProcess("Sync to Production",dwg,retVal,gs.productionWS)
    msg(dwg + " Sync processing time: " + getTimeElapsed(drawingTime) )
    del inputDrawing
    gzSupport.cleanupGarbage()

    if retVal == False:
        msg("return value set to: " + str(retVal))
    return retVal
Example No. 9
def doLoad(playlist_xml,folder,dwg,gs):
    # Load process drawing
    global log
    outputSuccess = False # default value

    inputDrawing = os.path.join(folder,dwg)
    drawingTime = gzSupport.timer(0)
    msg("\nLoading " + dwg + " to database")
    # load using FME
    if gs.fmeLoadFile == None or gs.fmeLoadFile == "" or gs.fmeLoadFile == gse.fmeFolder:
        msg("No FME file for loading")
        retVal = True
    else:
        retVal = gseRunFME.load(inputDrawing,gs.fmeExe,gs.fmeLoadFile,gs.stagingWS,gs.productionWS,gs.sourceEPSG,gs.runas,gs.truncate,playlist_xml,gs.source,
                getFeatureTypes(playlist_xml,"sourceName"),getFeatureTypes(playlist_xml,"targetName"))
        msg("FME processing time: " + getTimeElapsed(drawingTime))
        logProcess(gs.fmeLoadFile[:gs.fmeLoadFile.rfind(os.sep)+1],dwg,retVal,gs.stagingWS)
    msg(dwg + " Load processing time: " + getTimeElapsed(drawingTime) )
    gzSupport.cleanupGarbage()

    if retVal == False:
        msg("return value set to: " + str(retVal))

    return retVal
Example No. 10
                            gzSupport.showTraceback()
                            gzSupport.addError(err)
                            gzSupport.logProcessError(row.getValue(gzSupport.sourceNameField),gzSupport.sourceIDField,row.getValue(gzSupport.sourceIDField),targetName,err)
                            errCount += 1
                    #else:
                    #    errCount += 1
                    #    success = False
                    #    err = "Blank or null value calculated: unable to set value for " + targetName + " " + str(calcString)
                    #    gzSupport.addError(err)
                    #    gzSupport.logProcessError(row.getValue(gzSupport.sourceNameField),gzSupport.sourceIDField,row.getValue(gzSupport.sourceIDField),targetName,err)
                            
            try:
                updateCursor.updateRow(row)
            except:
                errCount += 1
                success = False
                err = "Exception caught: unable to update row"
                gzSupport.showTraceback()
                gzSupport.addError(err)
                gzSupport.logProcessError(row.getValue(gzSupport.sourceNameField),gzSupport.sourceIDField,row.getValue(gzSupport.sourceIDField),"One of the values",err)
            row = updateCursor.next()
            
    del updateCursor
    gzSupport.cleanupGarbage()
    arcpy.ResetProgressor()

    return success

if __name__ == "__main__":
    main()
Example No. 11
def main(argv=None):
    global targetWorkspace
    hasVersion = False
    desc = arcpy.Describe(gzSupport.workspace)
    if desc.workspaceType != "RemoteDatabase" and versionName == None:
        targetWorkspace = defaultWorkspace
    success = True
    arcpy.ResetProgressor()
    arcpy.env.workspace = gzSupport.workspace
    uniqueValues = gzSupport.getFieldValues("Unique", fieldNames, datasets)[0]
    sources = gzSupport.listDatasets(gzSupport.workspace)
    sNames = sources[0]
    sFullNames = sources[1]
    arcpy.SetProgressor("Step", "Load by " + str(fieldNames) + "...", 0, len(uniqueValues) * len(datasets), 1)
    for value in uniqueValues:
        try:
            hasVersion = False
            gzSupport.addMessage(value)
            if desc.workspaceType == "RemoteDatabase" and versionName != None:
                arcpy.SetProgressorLabel("Creating Version " + versionName)
                hasVersion = gzSupport.createVersion(defaultWorkspace, defaultVersionName, versionName)
            if hasVersion == True or versionName == None or desc.workspaceType == "LocalDatabase":
                arcpy.env.workspace = targetWorkspace
                targets = gzSupport.listDatasets(targetWorkspace)
                tNames = targets[0]
                tFullNames = targets[1]
                for dataset in datasets:
                    name = dataset.getAttributeNode("name").nodeValue
                    arcpy.SetProgressorLabel("Loading Dataset " + name)
                    targetTable = gzSupport.getFullName(name, tNames, tFullNames)
                    sourceTable = gzSupport.getFullName(name, sNames, sFullNames)
                    attrs = [f.name for f in arcpy.ListFields(targetTable)]
                    expr = getExpression(attrs, fieldNames, value)
                    arcpy.SetProgressorLabel("Loading Dataset " + name + " Where " + expr)
                    tName = targetTable[targetTable.rfind("\\") + 1 :]
                    tLocation = targetTable[0 : targetTable.rfind("\\")]
                    if gzSupport.deleteRows(tLocation, tName, expr) == True:
                        retVal = gzSupport.appendRows(sourceTable, targetTable, expr)
                        if retVal == False:
                            success = False
                    else:
                        success = False
                    arcpy.SetProgressorPosition()
                if success == True:
                    if desc.workspaceType == "RemoteDatabase":
                        arcpy.SetProgressorLabel("Reconcile and Post")
                        retVal = gzSupport.reconcilePost(defaultWorkspace, versionName, defaultVersionName)
                        if retVal == False:
                            success = False
                            gzSupport.deleteVersion(defaultWorkspace, versionName)
                    elif desc.workspaceType == "LocalDatabase":
                        arcpy.SetProgressorLabel("Completed Update for " + str(value))
                    gzSupport.logDatasetProcess(targetTable, sys.argv[0], retVal)
                else:
                    gzSupport.logDatasetProcess(targetTable, sys.argv[0], retVal)
                gzSupport.cleanupGarbage()

        except:
            gzSupport.showTraceback()
            success = False
            gzSupport.logDatasetProcess("Serious error", sys.argv[0], False)
        finally:
            arcpy.SetProgressorPosition()
            arcpy.ClearWorkspaceCache_management(defaultWorkspace)
    if success == False:
        gzSupport.addError("Errors occurred during process, look in log files for more information")
    if gzSupport.ignoreErrors == True:
        success = True
    if desc.workspaceType == "RemoteDatabase" and success == True:
        analyze(defaultWorkspace, datasets, tNames, tFullNames)
    arcpy.SetParameter(SUCCESS, success)

    arcpy.ClearWorkspaceCache_management(defaultWorkspace)
    gzSupport.compressGDB(gzSupport.workspace)
    gzSupport.compressGDB(defaultWorkspace)
    gzSupport.closeLog()
    return
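
getExpression is called above but not included in these snippets. A sketch of what such a helper might look like, building a where clause that restricts both the delete and the append to the current unique value; the quoting rule and the AND join are assumptions, not the project's implementation:

def getExpression(attrs, fieldNames, value):
    # Build "FIELD = value" terms for each load field present in the target table.
    parts = []
    for fieldName in fieldNames:
        if fieldName in attrs:
            if isinstance(value, str):
                parts.append(fieldName + " = '" + value + "'")
            else:
                parts.append(fieldName + " = " + str(value))
    return " AND ".join(parts)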
Example No. 12
                    #    errCount += 1
                    #    success = False
                    #    err = "Blank or null value calculated: unable to set value for " + targetName + " " + str(calcString)
                    #    gzSupport.addError(err)
                    #    gzSupport.logProcessError(row.getValue(gzSupport.sourceNameField),gzSupport.sourceIDField,row.getValue(gzSupport.sourceIDField),targetName,err)

            try:
                updateCursor.updateRow(row)
            except:
                errCount += 1
                success = False
                err = "Exception caught: unable to update row"
                gzSupport.showTraceback()
                gzSupport.addError(err)
                gzSupport.logProcessError(
                    row.getValue(gzSupport.sourceNameField),
                    gzSupport.sourceIDField,
                    row.getValue(gzSupport.sourceIDField), "One of the values",
                    err)
            row = updateCursor.next()

    del updateCursor
    gzSupport.cleanupGarbage()
    arcpy.ResetProgressor()

    return success


if __name__ == "__main__":
    main()
Example No. 13
def main(argv=None):
    success = True
    if not arcpy.Exists(gzSupport.workspace):
        gzSupport.addMessage(gzSupport.workspace +
                             " does not exist, attempting to create")
        gzSupport.createGizintaGeodatabase()
    else:
        gzSupport.compressGDB(gzSupport.workspace)
    arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    try:
        gzSupport.addMessage("Looking for drawings modified since " + since)
        minTime = dt.datetime.strptime(since, "%d/%m/%Y %I:%M:%S %p")
        cadFiles = getFileList(cadFolder, cadExt, minTime)
        if len(cadFiles) > 0:
            progBar = len(cadFiles) + 1
            arcpy.SetProgressor("step", "Importing Drawings...", 0, progBar, 1)
            deleteExistingRows(datasets)
            arcpy.SetProgressorPosition()
        for item in cadFiles:
            cadPath = item[0]
            cadName = item[1]
            gzSupport.addMessage("Importing Drawing " + cadName)

            for dataset in datasets:
                try:
                    name = dataset.getAttributeNode("sourceName").nodeValue
                except:
                    name = dataset.getAttributeNode("name").nodeValue

                gzSupport.sourceIDField = dataset.getAttributeNode(
                    "sourceIDField").nodeValue
                arcpy.SetProgressorLabel("Loading " + name + " for " +
                                         cadName + "...")
                arcpy.env.workspace = gzSupport.workspace
                targetName = dataset.getAttributeNode("targetName").nodeValue
                sourceWorkspace = os.path.join(cadPath, cadName)
                if not arcpy.Exists(
                        os.path.join(gzSupport.workspace, targetName)):
                    gzSupport.addMessage(
                        os.path.join(gzSupport.workspace, targetName) +
                        " does not exist")
                    mode = "export"
                else:
                    mode = "import"

                try:
                    if mode == "import":
                        retVal = gzSupport.importDataset(
                            sourceWorkspace, name, targetName, dataset)
                    elif mode == "export":
                        retVal = gzSupport.exportDataset(
                            sourceWorkspace, name, targetName, dataset)
                    #retVal = importLayer(cadPath,cadName,dataset)
                    if retVal == False:
                        success = False
                except:
                    gzSupport.showTraceback()
                    success = False
                    retVal = False

                arcpy.env.workspace = gzSupport.workspace
                gzSupport.logDatasetProcess(cadName, name, retVal)
                gzSupport.cleanupGarbage()
            arcpy.SetProgressorPosition()
    except:
        gzSupport.addError("A Fatal Error occurred")
        gzSupport.showTraceback()
        success = False
        gzSupport.logDatasetProcess("", "", False)
    finally:
        arcpy.ResetProgressor()
        arcpy.RefreshCatalog(gzSupport.workspace)
        arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
        gzSupport.cleanupGarbage()

    if success == False:
        gzSupport.addError(
            "Errors occurred during process, look in log files for more information"
        )
    if gzSupport.ignoreErrors == True:
        success = True
    gzSupport.closeLog()
    arcpy.SetParameter(SUCCESS, success)
Example No. 14
def main(argv=None):
    success = True
    if not arcpy.Exists(gzSupport.workspace):
        gzSupport.addMessage(gzSupport.workspace + " does not exist, attempting to create")
        gzSupport.createGizintaGeodatabase()
    else:
        gzSupport.compressGDB(gzSupport.workspace)
    arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    try:
        gzSupport.addMessage("Looking for drawings modified since " + since)
        minTime = datetime.datetime.strptime(since, "%d/%m/%Y %I:%M:%S %p")
        cadFiles = gzSupport.getFileList(cadFolder, cadExt, minTime)
        if len(cadFiles) > 0:
            progBar = len(cadFiles) + 1
            arcpy.SetProgressor("step", "Importing Drawings...", 0, progBar, 1)
            arcpy.SetProgressorPosition()
            gzSupport.deleteExistingRows(datasets)
        for item in cadFiles:
            cadPath = item[0]
            cadName = item[1]
            gzSupport.addMessage("Importing Drawing " + cadName)

            for dataset in datasets:
                try:
                    name = dataset.getAttributeNode("sourceName").nodeValue
                except:
                    name = dataset.getAttributeNode("name").nodeValue

                gzSupport.sourceIDField = dataset.getAttributeNode("sourceIDField").nodeValue
                xmlFields = gzSupport.getXmlElements(gzSupport.xmlFileName, "Field")
                arcpy.SetProgressorLabel("Loading " + name + " for " + cadName + "...")
                arcpy.env.workspace = gzSupport.workspace
                targetName = dataset.getAttributeNode("targetName").nodeValue
                sourceWorkspace = os.path.join(cadPath, cadName)
                exists = False
                if not arcpy.Exists(os.path.join(gzSupport.workspace, targetName)):
                    gzSupport.addMessage(os.path.join(gzSupport.workspace, targetName) + " does not exist")
                else:
                    exists = True
                    # arcpy.Delete_management(os.path.join(gzSupport.workspace,targetName))

                try:
                    if not exists == True:
                        retVal = gzSupport.exportDataset(sourceWorkspace, name, targetName, dataset, xmlFields)
                        addDrawingField(os.path.join(gzSupport.workspace, targetName), cadName)
                    else:
                        retVal = importLayer(cadPath, cadName, dataset)
                        addDrawingField(os.path.join(gzSupport.workspace, targetName), cadName)
                    if retVal == False:
                        success = False
                except:
                    gzSupport.showTraceback()
                    success = False
                    retVal = False

                arcpy.env.workspace = gzSupport.workspace
                gzSupport.logDatasetProcess(cadName, name, retVal)
                gzSupport.cleanupGarbage()
            arcpy.SetProgressorPosition()
    except:
        gzSupport.addError("A Fatal Error occurred")
        gzSupport.showTraceback()
        success = False
        gzSupport.logDatasetProcess("", "", False)
    finally:
        arcpy.ResetProgressor()
        arcpy.RefreshCatalog(gzSupport.workspace)
        arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
        gzSupport.cleanupGarbage()

    if success == False:
        gzSupport.addError("Errors occurred during process, look in log files for more information")
    if gzSupport.ignoreErrors == True:
        success = True
    gzSupport.closeLog()
    arcpy.SetParameter(SUCCESS, success)
Example No. 15
def main(argv = None):
    # process one or more drawings
    global log, playlists_xml, playlists
    outputSuccess = True # default value, will be set to False if any processing errors returned
    doImports()
    processed = 0
    errorCount = 0
    cfgfile = fixServerConfigPath(gseData_xml)
    xmlDataDoc = xml.dom.minidom.parse(cfgfile)
    gseData = gseDataSettings(xmlDataDoc)
    gss = []
    for playlist in playlists_xml:
        filepath = fixConfigPath(playlist)
        playlists.append(filepath)
        xmlDoc = xml.dom.minidom.parse(filepath)
        gsClass = gseSettings(xmlDoc,gseData)
        gss.append(gsClass)
        
    tm = time.strftime("%Y%m%d%H%M%S")
    
    logFile = gss[0].logFileName.replace('.log','_' + tm + '.log')
    log = open(logFile,'w')
    autoSync = gss[0].autoSync
    exitOnError = gss[0].exitOnError
        
    try:
        totalTime = gzSupport.timer(0)
        inputFiles = gzSupport.getFileList(gss[0].cadFolder,gss[0].fileExt,gss[0].minTime)

        for fileFound in inputFiles:
            if errorCount > 0 and exitOnError == True:
                break
            folder = fileFound[0]
            dwg = fileFound[1]
            cadFile = os.path.join(folder,dwg)
            drawingTime = gzSupport.timer(0)
            pVal = 0 # counter for playlist looping
            partFailed = False
            if(dwg.find(gss[pVal].nameContains) > -1) and os.path.exists(cadFile):
                msg("\n" + dwg)
                for playlist in playlists: # Loop through the playlists and do the loading from CAD
                    if cont(errorCount,exitOnError,partFailed): # stop processing if any errors or continue if exit on error param is false
                        retVal = doLoad(playlist,folder,dwg,gss[pVal]) # Load the playlist using FME subprocess
                        if(retVal != True):
                            outputSuccess = False
                            errorCount += 1
                            gss[pVal].loaded = False
                            partFailed = True
                        else:
                            gss[pVal].loaded = True
                    pVal += 1
                if cont(errorCount,exitOnError,partFailed):
                    pVal = 0
                    if partFailed == False and autoSync == True: # auto-sync is enabled and no errors have been returned
                        retVal = doSync(playlists,folder,dwg,gss[pVal]) # sync from Staging to Production
                        if(retVal != True):
                            outputSuccess = False
                            errorCount += 1
                        else:
                            for playlist in playlists: # go back through the playlists and Sync for this drawing
                                gss[pVal].syncd = True
                                pVal += 1
                loaded = False
                for gs in gss:
                    if (gs.loaded == True or gs.syncd == True) and dwg.find(gs.nameContains) > -1: # if any load or sync processing happened...
                        loaded = True
                if loaded == True:
                    msg(dwg + " total processing time: " + getTimeElapsed(drawingTime))
                    processed += 1
                    if gss[0].deleteCADFiles == True and partFailed == False:
                        try:
                            gzSupport.cleanupGarbage()
                            os.remove(cadFile)
                            try:
                                os.remove(cadFile[:len(cadFile)-4]+'.wld')
                            except:
                                pass
                            msg(cadFile + " deleted")
                        except:
                            msg("Unable to delete CAD file " + cadFile + "... continuing")
                if processed % 10 == 0:
                    msg("Processed " + str(processed))
                gzSupport.cleanupGarbage()
    except:
        errorCount += 1
        msg("A fatal error was encountered in gseLoaderFME.py")
        gzSupport.showTraceback()
        outputSuccess = False
        logProcess("gseLoaderFME","drawings",outputSuccess,gss[0].stagingWS)

    finally:
        arcpy.SetParameterAsText(successParam,outputSuccess)
        msg("\nTotal Number of Errors = " + str(errorCount))
        msg("outputSuccess set to: " + str(outputSuccess))
        msg(str(processed) + " drawings processed")
        msg("Total Processing time: " + getTimeElapsed(totalTime) + "\n")
        del gss, playlists
        log.close()
Example No. 16
def main(argv=None):
    global targetWorkspace
    hasVersion = False
    desc = arcpy.Describe(gzSupport.workspace)
    if desc.workspaceType != "RemoteDatabase" and versionName == None:
        targetWorkspace = defaultWorkspace
    success = True
    arcpy.ResetProgressor()
    arcpy.env.workspace = gzSupport.workspace
    uniqueValues = gzSupport.getFieldValues("Unique", fieldNames, datasets)[0]
    sources = gzSupport.listDatasets(gzSupport.workspace)
    sNames = sources[0]
    sFullNames = sources[1]
    arcpy.SetProgressor("Step", "Load by " + str(fieldNames) + "...", 0,
                        len(uniqueValues) * len(datasets), 1)
    for value in uniqueValues:
        try:
            hasVersion = False
            gzSupport.addMessage(value)
            if desc.workspaceType == "RemoteDatabase" and versionName != None:
                arcpy.SetProgressorLabel("Creating Version " + versionName)
                hasVersion = gzSupport.createVersion(defaultWorkspace,
                                                     defaultVersionName,
                                                     versionName)
            if hasVersion == True or versionName == None or desc.workspaceType == "LocalDatabase":
                arcpy.env.workspace = targetWorkspace
                targets = gzSupport.listDatasets(targetWorkspace)
                tNames = targets[0]
                tFullNames = targets[1]
                for dataset in datasets:
                    name = dataset.getAttributeNode("name").nodeValue
                    arcpy.SetProgressorLabel("Loading Dataset " + name)
                    targetTable = gzSupport.getFullName(
                        name, tNames, tFullNames)
                    sourceTable = gzSupport.getFullName(
                        name, sNames, sFullNames)
                    attrs = [f.name for f in arcpy.ListFields(targetTable)]
                    expr = getExpression(attrs, fieldNames, value)
                    arcpy.SetProgressorLabel("Loading Dataset " + name +
                                             " Where " + expr)
                    tName = targetTable[targetTable.rfind("\\") + 1:]
                    tLocation = targetTable[0:targetTable.rfind("\\")]
                    if gzSupport.deleteRows(tLocation, tName, expr) == True:
                        retVal = gzSupport.appendRows(sourceTable, targetTable,
                                                      expr)
                        if retVal == False:
                            success = False
                    else:
                        success = False
                    arcpy.SetProgressorPosition()
                if success == True:
                    if desc.workspaceType == "RemoteDatabase":
                        arcpy.SetProgressorLabel("Reconcile and Post")
                        retVal = gzSupport.reconcilePost(
                            defaultWorkspace, versionName, defaultVersionName)
                        if retVal == False:
                            success = False
                            gzSupport.deleteVersion(defaultWorkspace,
                                                    versionName)
                    elif desc.workspaceType == "LocalDatabase":
                        arcpy.SetProgressorLabel("Completed Update for " +
                                                 str(value))
                    gzSupport.logDatasetProcess(sys.argv[0], targetTable,
                                                retVal)
                else:
                    gzSupport.logDatasetProcess(sys.argv[0], targetTable,
                                                retVal)
                gzSupport.cleanupGarbage()

        except:
            gzSupport.showTraceback()
            success = False
            gzSupport.logDatasetProcess("Serious error", sys.argv[0], False)
        finally:
            arcpy.SetProgressorPosition()
            arcpy.ClearWorkspaceCache_management(defaultWorkspace)
    if success == False:
        gzSupport.addError(
            "Errors occurred during process, look in log files for more information"
        )
    if gzSupport.ignoreErrors == True:
        success = True
    if desc.workspaceType == "RemoteDatabase" and success == True:
        analyze(defaultWorkspace, datasets, tNames, tFullNames)
    arcpy.SetParameter(SUCCESS, success)

    arcpy.ClearWorkspaceCache_management(defaultWorkspace)
    gzSupport.compressGDB(gzSupport.workspace)
    gzSupport.compressGDB(defaultWorkspace)
    gzSupport.closeLog()
    return