Example 1
def main(argv=None):
    success = True
    gzSupport.compressGDB(gzSupport.workspace)
    arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    tables = gzSupport.listDatasets(gzSupport.workspace)
    tNames = tables[0]
    tFullNames = tables[1]
    name = ''

    for dataset in datasets:
        arcpy.env.workspace = gzSupport.workspace
        name = dataset.getAttributeNode("name").nodeValue
        table = gzSupport.getFullName(name, tNames, tFullNames)
        gzSupport.sourceIDField = dataset.getAttributeNode(
            "sourceIDField").nodeValue
        gzSupport.sourceNameField = dataset.getAttributeNode(
            "sourceNameField").nodeValue
        if not arcpy.Exists(table):
            gzSupport.addError("Feature Class " + table +
                               " does not exist, exiting")
            arcpy.SetParameter(SUCCESS, False)
            return
        if not arcpy.TestSchemaLock(table):
            gzSupport.addError("Unable to obtain a schema lock for " + table +
                               ", exiting")
            arcpy.SetParameter(SUCCESS, False)
            return -1
        desc = arcpy.Describe(table)
        fields = dataset.getElementsByTagName("Field")
        try:
            attrs = [f.name for f in arcpy.ListFields(table)]
            for field in fields:
                arcpy.env.workspace = gzSupport.workspace
                targetName = gzSupport.getNodeValue(field, "TargetName")
                gzSupport.addGizintaField(table, targetName, field, attrs)

            retVal = setFieldValues(table, fields)
            if retVal == False:
                success = False
            gzSupport.logDatasetProcess(name, "Fields", retVal)
            arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
            gzSupport.cleanupGarbage()

        except:
            gzSupport.showTraceback()
            success = False
            gzSupport.logDatasetProcess("fieldCalculator", name, False)
        finally:
            arcpy.RefreshCatalog(table)
            arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    if success == False:
        gzSupport.addError(
            "Errors occurred during process, look in log file tools\\log\\fieldCalculator.log for more information"
        )
    if gzSupport.ignoreErrors == True:
        success = True
    arcpy.SetParameter(SUCCESS, success)
    arcpy.ResetProgressor()
    gzSupport.closeLog()
    return
Example 2
def process_pallets(pallets, is_post_copy=False):
    '''
    pallets: Pallet[]
    is_post_copy: Boolean

    Loop over all pallets, check if data has changed and determine whether to process.
    Call `process` if this is not the post copy. Otherwise call `post_copy_process`.
    Finally, call ship.
    '''

    if not is_post_copy:
        verb = 'processing'
    else:
        verb = 'post copy processing'

    log.info('%s pallets...', verb)

    for pallet in pallets:
        try:
            if pallet.is_ready_to_ship():  #: checks for schema changes or errors
                if pallet.requires_processing() and pallet.success[0]:  #: checks for data that was updated
                    log.info('%s pallet: %r', verb, pallet)
                    start_seconds = clock()

                    arcpy.ResetEnvironments()
                    arcpy.ClearWorkspaceCache_management()
                    if not is_post_copy:
                        with seat.timed_pallet_process(pallet, 'process'):
                            pallet.process()
                    else:
                        with seat.timed_pallet_process(pallet,
                                                       'post_copy_process'):
                            pallet.post_copy_process()

                    log.debug('%s pallet %s', verb.replace('ing', 'ed'),
                              seat.format_time(clock() - start_seconds))

                if not is_post_copy:
                    start_seconds = clock()

                    log.info('shipping pallet: %r', pallet)
                    arcpy.ResetEnvironments()
                    arcpy.ClearWorkspaceCache_management()
                    with seat.timed_pallet_process(pallet, 'ship'):
                        pallet.ship()
                    log.debug('shipped pallet %s',
                              seat.format_time(clock() - start_seconds))
        except Exception as e:
            pallet.success = (False, e)
            log.error('error %s pallet: %s for pallet: %r',
                      verb,
                      e,
                      pallet,
                      exc_info=True)
Example 3
def process_pallets(pallets):
    '''pallets: Pallet[]

    Loop over all pallets, check if data has changed, and determine whether to process.
    '''

    verb = 'processing'

    log.info('%s pallets...', verb)

    for pallet in pallets:
        try:
            if pallet.is_ready_to_ship():  #: checks for schema changes or errors
                if pallet.requires_processing():  #: checks for data that was updated
                    log.info('%s pallet: %r', verb, pallet)
                    start_seconds = perf_counter()

                    arcpy.ResetEnvironments()
                    arcpy.ClearWorkspaceCache_management()

                    with seat.timed_pallet_process(pallet, 'process'):
                        pallet.process()

                    log.debug('%s pallet %s', verb.replace('ing', 'ed'), seat.format_time(perf_counter() - start_seconds))
        except Exception as e:
            pallet.success = (False, str(e))
            log.error('error %s pallet: %s for pallet: %r', verb, e, pallet, exc_info=True)
Example 4
def main(argv=None):
    success = True
    name = ''
    try:
        if not arcpy.Exists(gzSupport.workspace):
            gzSupport.addMessage(gzSupport.workspace +
                                 " does not exist, attempting to create")
            gzSupport.createGizintaGeodatabase()
        else:
            gzSupport.compressGDB(gzSupport.workspace)
        if len(datasets) > 0:
            progBar = len(datasets) + 1
            arcpy.SetProgressor("step", "Importing Layers...", 0, progBar, 1)
            arcpy.SetProgressorPosition()
        for dataset in datasets:
            gzSupport.sourceIDField = dataset.getAttributeNode(
                "sourceIDField").nodeValue
            sourceName = dataset.getAttributeNode("sourceName").nodeValue
            targetName = dataset.getAttributeNode("targetName").nodeValue
            arcpy.SetProgressorLabel("Loading " + sourceName + " to " +
                                     targetName + "...")
            if not arcpy.Exists(sourceLayer):
                gzSupport.addError("Layer " + sourceLayer +
                                   " does not exist, exiting")
                return
            target = os.path.join(gzSupport.workspace, targetName)
            arcpy.env.workspace = gzSupport.workspace
            if not arcpy.Exists(target):
                gzSupport.addMessage("Feature Class " + target +
                                     " does not exist")
            else:
                arcpy.Delete_management(target)
            try:
                retVal = exportDataset(sourceLayer, targetName, dataset)
                if retVal == False:
                    success = False
            except:
                gzSupport.showTraceback()
                success = False
                retVal = False
            gzSupport.logDatasetProcess(sourceName, targetName, retVal)
        arcpy.SetProgressorPosition()
    except:
        gzSupport.addError("A Fatal Error occurred")
        gzSupport.showTraceback()
        success = False
        gzSupport.logDatasetProcess("extractLayerToGDB", name, False)
    finally:
        arcpy.ResetProgressor()
        arcpy.RefreshCatalog(gzSupport.workspace)
        arcpy.ClearWorkspaceCache_management(gzSupport.workspace)

    if success == False:
        gzSupport.addError(
            "Errors occurred during process, look in log files for more information"
        )
    if gzSupport.ignoreErrors == True:
        success = True
    gzSupport.closeLog()
    arcpy.SetParameter(SUCCESS, success)
Example 5
def crs2_empty_stage_sde(args):
    # script name
    script_name = os.path.basename(__file__)

    # script parameters
    sde = args[0]
    # log = args[1]

    # Set environment
    arcpy.env.workspace = sde

    # log function
    log_msg('calling {}'.format(script_name))

    # variables
    err_message = None

    try:
        # clear workspace cache
        arcpy.ClearWorkspaceCache_management()

        # Find all feature classes and delete a subset
        fcl = arcpy.ListFeatureClasses()
        log_msg('Deleting subset of feature classes:')

        for fc in fcl:
            log_msg(fc)
            if fc in itemsToKeep:
                log_msg('Kept:{}'.format(fc))
            else:
                try:
                    arcpy.Delete_management(fc)
                    log_msg('Deleted: {}'.format(fc))
                except:
                    # print('***ERROR*** while deleting {} - delete manually!!!').format(fc)
                    err_message = 'ERROR: deleting {}\n'.format(fc)

        # Find all tables and delete a subset
        tbll = arcpy.ListTables()
        log_msg('Deleting subset of feature tables:')
        for tbl in tbll:
            if tbl in itemsToKeep:
                log_msg('Kept:{}'.format(tbl))
            else:
                try:
                    arcpy.Delete_management(tbl)
                    log_msg('Deleted: {}'.format(tbl))
                except:
                    if err_message is not None:
                        err_message = err_message + 'ERROR: deleting {}\n'.format(
                            tbl)
                    else:
                        err_message = 'ERROR: deleting {}\n'.format(tbl)

        log_msg("Process time: %s \n" %
                str(datetime.datetime.now() - starttime))

    except Exception as e:
        err_message = "ERROR while running {0}: {1}".format(script_name, e)
    return err_message, log_messages
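For context, a hypothetical invocation (the connection-file path is illustrative; log_msg, itemsToKeep, starttime and log_messages are module-level names the snippet assumes):

# Illustrative call only; the .sde path is not from the original script.
err, logs = crs2_empty_stage_sde([r'C:\connections\crs2_stage.sde'])
if err:
    log_msg(err)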
Example 6
def main(argv=None):
    # main function - list the datasets and delete rows
    success = True
    try:
        names = gzSupport.listDatasets(sourceGDB)
        tNames = names[0]
        tFullNames = names[1]
        arcpy.SetProgressor("Step", "Deleting rows...", 0, len(tFullNames), 1)
        i = 0
        for name in tFullNames:
            arcpy.SetProgressorPosition(i)
            arcpy.SetProgressorLabel(" Deleting rows in " + name + "...")
            # for each full name
            if len(datasetNames) == 0 or tNames[i].upper() in datasetNames:
                retVal = doTruncate(name)
                gzSupport.logDatasetProcess(name, "deleteRowsGDB", retVal)
                if retVal == False:
                    success = False
            else:
                gzSupport.addMessage("Skipping " + tNames[i])
            i += 1
    except:
        gzSupport.showTraceback()
        gzSupport.addError("Failed to delete rows")
        success = False
        gzSupport.logDatasetProcess(name, "deleteRowsGDB", success)
    finally:
        arcpy.SetParameter(SUCCESS, success)
        arcpy.ResetProgressor()
        gzSupport.closeLog()
        arcpy.ClearWorkspaceCache_management(sourceGDB)
Example 7
def main(argv = None):
    # main function - list the datasets and delete rows
    success = True
    name = ''
    gzSupport.workspace = sourceGDB
    try:
        if len(datasetNames) == 0:
            names = gzSupport.listDatasets(sourceGDB)
            tNames = names[0]
        else:
            tNames = datasetNames
        arcpy.SetProgressor("Step","Deleting rows...",0,len(tNames),1)
        i = 0
        for name in tNames:
            arcpy.SetProgressorPosition(i)
            arcpy.SetProgressorLabel(" Deleting rows in " + name + "...")
            # for each full name
            if len(datasetNames) == 0 or gzSupport.nameTrimmer(name.upper()) in datasetNames:
                retVal = doTruncate(os.path.join(sourceGDB,name))
                gzSupport.logDatasetProcess("deleteRowsGDB",name,retVal)
                if retVal == False:
                    success = False
            else:
                gzSupport.addMessage("Skipping "  + gzSupport.nameTrimmer(name))
            i = i + 1
    except:
        gzSupport.showTraceback()
        gzSupport.addError("Failed to delete rows")
        success = False
        gzSupport.logDatasetProcess("deleteRowsGDB",name,success)
    finally:
        arcpy.SetParameter(SUCCESS, success)
        arcpy.ResetProgressor()
        gzSupport.closeLog()
        arcpy.ClearWorkspaceCache_management(sourceGDB)
Example 8
def reconcilePost(sdeDefault, versionName, defaultVersion):
    # reconcile and post a version
    addMessageLocal("Reconcile and Post Version... ")

    if versionName.split(".")[0] == versionName:
        # default to DBO
        versionName = "DBO." + versionName
    retcode = False

    addMessageLocal("Reconciling " + versionName + "...")
    try:
        retcode = arcpy.ReconcileVersion_management(
            sdeDefault, versionName, defaultVersion, "BY_OBJECT",
            "FAVOR_TARGET_VERSION", "LOCK_ACQUIRED", "NO_ABORT", "POST")
        if str(retcode) == sdeDefault:
            retcode = True
        else:
            addMessageLocal("Unexpected result: " + str(retcode) +
                            ", continuing...")
            retcode = True
    except:
        addMessageLocal("Reconcile failed: \n" + str(retcode) + "\n" +
                        sdeDefault)
        retcode = False
    arcpy.env.workspace = sdeDefault
    arcpy.ClearWorkspaceCache_management(sdeDefault)
    return retcode
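An illustrative call (the connection path and version names are assumptions, not taken from the original script):

# The version name is owner-qualified here, so the DBO. prefix is not added.
ok = reconcilePost(r'C:\connections\gis_default.sde',
                   'EDITOR.NightlyEdits',
                   'dbo.DEFAULT')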
Example 9
def main(argv = None):
    # main function - list the source and target datasets, then append where there is a match on non-prefixed name
    success = True
    name = ''
    arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    try:
        if len(datasetNames) == 0:
            sources = gzSupport.listDatasets(sourceGDB)
            sNames = sources[0]
            sFullNames = sources[1]
            targets = gzSupport.listDatasets(targetGDB)
            tNames = targets[0]
            tFullNames = targets[1]
        else:
            sNames = datasetNames
        
        s = 0
        arcpy.SetProgressor("Step","Appending rows...",0,len(sNames),1)
        for name in sNames:
            arcpy.SetProgressorPosition(s)
            arcpy.SetProgressorLabel(" Appending rows in " + name + "...")
            # for each source name
            if debug:
                gzSupport.addMessage(name)
            target = os.path.join(targetGDB,name)
            if arcpy.Exists(target):
                # append if there is a match
                if len(datasetNames) == 0 or gzSupport.nameTrimmer(name) in datasetNames:
                    retVal = doAppend(os.path.join(sourceGDB,name),target)
                    gzSupport.logDatasetProcess("appendAlltoGDB",name,retVal)
                    if retVal == False:
                        success = False
                else:
                    gzSupport.addMessage("Skipping "  + gzSupport.nameTrimmer(name))

            s = s + 1
    except:
        gzSupport.showTraceback()
        gzSupport.addError("Unable to append datasets")
        success = False
        gzSupport.logDatasetProcess("appendAlltoGDB",name,success)
    finally:
        arcpy.SetParameter(SUCCESS, success)
        arcpy.ResetProgressor()
        gzSupport.closeLog()
        arcpy.ClearWorkspaceCache_management(targetGDB)
Example 10
def main(argv=None):
    # main function - list the source and target datasets, then append where there is a match on non-prefixed name
    success = True
    arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    try:
        sources = gzSupport.listDatasets(sourceGDB)
        sNames = sources[0]
        sFullNames = sources[1]
        targets = gzSupport.listDatasets(targetGDB)
        tNames = targets[0]
        tFullNames = targets[1]
        s = 0
        arcpy.SetProgressor("Step", "Appending rows...", 0, len(sFullNames), 1)
        for name in sNames:
            arcpy.SetProgressorPosition(s)
            arcpy.SetProgressorLabel(" Deleting rows in " + name + "...")
            # for each source name
            if debug:
                gzSupport.addMessage(name)
            try:
                # look for the matching name in target names
                t = tNames.index(name)
            except:
                # will get here if no match
                t = -1
            if t > -1:
                # append if there is a match
                if len(datasetNames) == 0 or name.upper() in datasetNames:
                    retVal = doAppend(sFullNames[s], tFullNames[t])
                    gzSupport.logDatasetProcess(name, "appendAlltoGDB", retVal)
                    if retVal == False:
                        success = False
                else:
                    gzSupport.addMessage("Skipping " + name)

            s = s + 1
    except:
        gzSupport.showTraceback()
        gzSupport.addError("Unable to append datasets")
        success = False
        gzSupport.logDatasetProcess(name, "appendAlltoGDB", success)
    finally:
        arcpy.SetParameter(SUCCESS, success)
        arcpy.ResetProgressor()
        gzSupport.closeLog()
        arcpy.ClearWorkspaceCache_management(targetGDB)
Example 11
def main():

    # Set up logging
    LOG_FILENAME =  r"C:\Users\1528874122E\Desktop\test\DomainCompare.log"

    logging.basicConfig(level=logging.DEBUG,
                    format='%(asctime)s %(levelname)-8s %(message)s',
                    datefmt='%a, %d %b %Y %H:%M:%S',
                    filename= LOG_FILENAME,
                    filemode='w')
    try:

        # Percent to consider a strong match
        percent = 75

        # Connection path to geodatabase
        myGDB = r"C:\Users\1528874122E\Desktop\ScriptProjects\AFAdaptation311\311_AF_GeoBase_Shell_2018.gdb"

        # List of all existing domains (as domain objects)
        domainDesc = dict((str(domain.name), sorted(list(value.lower() for value in domain.codedValues.values()))) for domain in arcpy.da.ListDomains(myGDB) if domain.domainType == 'CodedValue')

        # Finds domains with duplicate values in the exact same order
        rev_domainDesc = {}
        for key, value in domainDesc.items():
            rev_domainDesc.setdefault(str(value), set()).add(key)
        dup_domainDesc = dict((key, values) for key, values in rev_domainDesc.items() if len(values) > 1)

        print "{} sets of identical matching domain descriptions found in {}".format(len(dup_domainDesc), myGDB)
        logging.info("{} sets of identical matching domain descriptions found in {}".format(len(dup_domainDesc), myGDB))

        for values in dup_domainDesc.values():
            logging.info(", ".join(val for val in values))

        strongMatch=[]
        # Finds domains with a % match of coded value descriptions
        for aKey, aValList in domainDesc.iteritems():
            for bKey, bValList in domainDesc.iteritems():
                if aKey < bKey:  # compare each unordered pair exactly once
                    matches = [x for x in aValList if x in bValList]
                    if len(matches)>0:
                        percentMatchAB = (float(len(matches))/float(len(aValList)))*100
                        percentMatchBA = (float(len(matches))/float(len(bValList)))*100
                        if percentMatchAB>=percent and percentMatchBA>=percent:
                            strongMatch.append(list((aKey, bKey)))

        print "{} sets of strong matching domain descriptions found in {}".format(len(strongMatch), myGDB)
        logging.info("{} sets of strong matching domain descriptions found in {}".format(len(strongMatch), myGDB))

        for values in strongMatch:
            logging.info(", ".join(val for val in values))

    except:
        logging.info('Failed...')
        sys.exit(2)

    finally:
        # Cleanup
        arcpy.ClearWorkspaceCache_management()
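In the strong-match loop above, both ratios must clear the threshold: with percent = 75, a domain of 8 descriptions sharing 7 values with a domain of 10 matches 87.5% one way but only 70% the other, so that pair is not recorded as a strong match.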
Example 12
	def commit_properties(self):
		arcpy.AddMessage("inn")
		if self.export_source_directory.lower().endswith('.sde'):
			arcpy.ClearWorkspaceCache_management(self.export_source_directory)

		arcpy.AddMessage("middle")
		self.result_file = os.path.join(arcpy.env.scratchWorkspace, self.zipfile_name)
		for job in self.export_jobs:
			job['layer'] = os.path.join(self.export_source_directory, job['layer'])
Example 13
def importLayer(cadPath, cadName, dataset):
    result = False
    try:
        name = dataset.getAttributeNode("targetName").nodeValue
    except:
        name = dataset.getAttributeNode("name").nodeValue

    table = os.path.join(gzSupport.workspace, name)
    layerName = dataset.getAttributeNode("sourceName").nodeValue
    layer = os.path.join(cadPath, cadName, layerName)
    gzSupport.addMessage("Importing Layer " + layer)

    try:
        whereClause = gzSupport.getNodeValue(dataset, "WhereClause")
        xmlFields = dataset.getElementsByTagName("Field")
        gzSupport.addMessage("Where " + whereClause)
        if not arcpy.Exists(table):
            err = "Feature Class " + name + " does not exist"
            gzSupport.addError(err)
            gzSupport.logProcessError(cadName, gzSupport.sourceIDField, name,
                                      name, err)
            return False
        if whereClause != '':
            view = gzSupport.makeFeatureView(gzSupport.workspace, layer,
                                             layerName + "_View", whereClause,
                                             xmlFields)
        else:
            view = layer
        count = int(arcpy.GetCount_management(view).getOutput(0))
        gzSupport.addMessage(str(count) + " source Features for " + name)

        if hasJoinTo(dataset) == True:
            res = joinToCsv(view, dataset, cadPath, cadName)
            result = res[0]
            view = res[1]
        else:
            result = True

        if result == True and count > 0:
            arcpy.Append_management([view], table, "NO_TEST", "", "")
            arcpy.ClearWorkspaceCache_management(gzSupport.workspace)

    except:
        err = "Failed to import layer " + name
        gzSupport.addError(err)
        gzSupport.showTraceback()
        gzSupport.logProcessError(cadName, gzSupport.sourceIDField, name,
                                  layerName, err)
    gzSupport.cleanupGarbage()
    try:
        del view
    except:
        gzSupport.addMessage("")
    return result
Example 14
    def arcpy(self):
        print "Setting arcpy environmental variables...",
        arcpy.env.workspace = self.w
        arcpy.env.scratchWorkspace = self.temp  # changed here from self.w to self.temp
        arcpy.ClearWorkspaceCache_management()
        arcpy.env.snapRaster = self.d
        arcpy.env.extent = arcpy.sa.Raster(self.d).extent
        arcpy.env.cellSize = self.d
        arcpy.env.mask = self.d
        arcpy.env.overwriteOutput = True
        print "OK"
Example 15
def createLocator(info):
    arcpy.env.overwriteOutput = True
    arcpy.env.workspace = info['workspace']

    in_address_locator_style = info['in_address_locator_style']
    in_reference_data = info['in_reference_data']
    in_field_map = info['in_field_map']
    out_address_locator = info['loc_path']
    config_keyword = info['config_keyword']
    in_data = info['in_data']

    print "removing temp files."

    if os.path.isfile(out_address_locator + '.loc'):
        os.remove(out_address_locator + '.loc')

    if os.path.isfile(out_address_locator + '.loc.xml'):
        os.remove(out_address_locator + '.loc.xml')

    if os.path.isfile(out_address_locator + '.lox'):
        os.remove(out_address_locator + '.lox')

    print "Creating the locator: " + out_address_locator + "."
    arcpy.ClearWorkspaceCache_management()

    try:
        arcpy.CreateAddressLocator_geocoding(in_address_locator_style,
                                             in_reference_data, in_field_map,
                                             out_address_locator,
                                             config_keyword)
        print "Succcesfully Created thelocator: " + out_address_locator + "!"
    except:
        print 'Error creating geocoder: ' + out_address_locator + '.'
        print arcpy.GetMessages(2)
        logger.error('Error creating geocoder: ' + out_address_locator + '.')
        logger.error(arcpy.GetMessages(2))

    arcpy.ClearWorkspaceCache_management()
    arcpy.env.workspace = ""
Example 16
def sde2shp(sde_connection,featureclass_name,output_path,outputSHPname):
    #establish sde connection workspace
    arcpy.env.workspace = sde_connection
    # if the shapefile already exists, delete it
    if arcpy.Exists(output_path+'//'+outputSHPname):
        arcpy.Delete_management(output_path+'//'+outputSHPname)
    #Export the sde featureclass to the esri shapefile
    arcpy.FeatureClassToFeatureClass_conversion(featureclass_name, 
                                            output_path, 
                                            outputSHPname)
    #drop SDE connection 
    arcpy.ClearWorkspaceCache_management()
    return output_path+'\\'+outputSHPname
Example 17
def extract(xmlFileName, rowLimit, workspace, sourceLayer, targetFC):

    xmlDoc = dla.getXmlDoc(xmlFileName)
    if workspace == "" or workspace == "#" or workspace is None:
        dla.workspace = arcpy.env.scratchGDB
    else:
        dla.workspace = workspace
    fields = dla.getFields(xmlFileName)
    success = True
    name = ''
    try:
        if not arcpy.Exists(dla.workspace):
            dla.addMessage(dla.workspace +
                           " does not exist, attempting to create")
            dla.createGeodatabase()
        if len(fields) > 0:
            arcpy.SetProgressor("step", "Importing Layer...", 0, 1, 1)

            if sourceLayer == '' or sourceLayer == '#':
                source = dla.getNodeValue(xmlDoc, "Datasets/Source")
            else:
                source = sourceLayer
            if targetFC == '' or targetFC == '#':
                targetName = dla.getTargetName(xmlDoc)
            else:
                targetName = targetFC[targetFC.rfind(os.sep) + 1:]

            sourceName = dla.getSourceName(xmlDoc)
            arcpy.SetProgressorLabel("Loading " + sourceName + " to " +
                                     targetName + "...")
            #if not arcpy.Exists(sourceLayer):
            #    dla.addError("Layer " + sourceLayer + " does not exist, exiting")
            #    return

            retVal = exportDataset(xmlDoc, sourceLayer, dla.workspace,
                                   targetName, rowLimit)
            if retVal == False:
                success = False

        arcpy.SetProgressorPosition()
    except:
        dla.addError("A Fatal Error occurred")
        dla.showTraceback()
        success = False
    finally:
        arcpy.ResetProgressor()
        #arcpy.RefreshCatalog(dla.workspace)
        arcpy.ClearWorkspaceCache_management(dla.workspace)

    return success
Example 18
def main(argv = None):
    success = True
    gzSupport.compressGDB(gzSupport.workspace)
    if len(datasets) > 0:
        progBar = len(datasets)
        arcpy.SetProgressor("step", "Running QA...", 0,progBar, 1) 
    for dataset in datasets:
        arcpy.env.workspace = gzSupport.workspace
        name = dataset.getAttributeNode("name").nodeValue
        gzSupport.sourceIDField = dataset.getAttributeNode("sourceIDField").nodeValue
        table = os.path.join(gzSupport.workspace,name)
        fields = dataset.getElementsByTagName("Field")
        try:
            # run qa for dataset
            qaRulesDataset = dataset.getAttributeNode("qa").nodeValue
            gzSupport.addMessage("\nRunning QA (" + qaRulesDataset + ") for " + name)
            retVal = runDatasetChecks(dataset,table,qaRulesDataset)
            if retVal == False:
                success = False
            
            for field in fields:
                sourceQA = False
                targetQA = False
                fieldName = ""
                if sourceFieldQA.lower() == "true" and qaRulesDataset.find("CheckFields") > -1:
                    sourceQA = True
                    fieldName = gzSupport.getNodeValue(field,"SourceName")
                if targetFieldQA.lower() == "true" and qaRulesDataset.find("CheckFields") > -1:
                    targetQA = True
                    fieldName = gzSupport.getNodeValue(field,"TargetName")
                retVal = runFieldCheck(dataset,table,field,sourceQA,targetQA)
                if retVal == False:
                    success = False
                gzSupport.logDatasetProcess(name,fieldName,retVal)
            arcpy.SetProgressorPosition()
        except:
            gzSupport.showTraceback()
            gzSupport.addError("Field Check Error")
            success = False
            gzSupport.logDatasetProcess(name,"",False)
        finally:
            arcpy.ResetProgressor()
            arcpy.RefreshCatalog(table)
            arcpy.ClearWorkspaceCache_management(gzSupport.workspace)
    if success == False:
        gzSupport.addError("Errors occurred during process, look in log files for more information")        
    if gzSupport.ignoreErrors == True:
        success = True
    arcpy.SetParameter(SUCCESS, success)
    gzSupport.closeLog()
    return
Example 19
def main():
    try:
        # Connection path to geodatabase (as administrator if SDE)
        myGDB = r"C:\Users\1528874122E\AppData\Roaming\ESRI\Desktop10.3\ArcCatalog\Connection to HQV_72.sde"

        # Get domains that are assigned to a field
        domainsUsed_names = []
        for dirpath, dirnames, filenames in arcpy.da.Walk(myGDB, datatype=["FeatureClass", "Table"]):
            for filename in filenames:
                print "Checking {}".format(os.path.join(dirpath, filename))

                ## Check for normal field domains
                for field in arcpy.ListFields(os.path.join(dirpath, filename)):
                    if field.domain:
                        domainsUsed_names.append(field.domain)

                ## Check for domains used in a subtype field
                subtypes = arcpy.da.ListSubtypes(os.path.join(dirpath, filename))
                for stcode, stdict in subtypes.iteritems():
                    if stdict["SubtypeField"] != u'':
                        for field, fieldvals in stdict["FieldValues"].iteritems():
                            if not fieldvals[1] is None:
                                domainsUsed_names.append(fieldvals[1].name)
                ## end for subtypes
            ## end for filenames
        ## end for geodatabase Walk

        # List of all existing domains (as domain objects)
        domainsExisting = arcpy.da.ListDomains(myGDB)

        # Find existing domain names that are not in use (using set difference)
        domainsUnused_names = (
            set([dom.name for dom in domainsExisting]) - set(domainsUsed_names)
        )

        # Get domain objects for unused domain names
        domainsUnused = [
            dom for dom in domainsExisting
            if dom.name in domainsUnused_names
        ]
        print "{} unused domains in {}".format(len(domainsUnused), myGDB)

        # Cleanup
        #del domainsExisting
        #del domainsUnused_names

    finally:
        # Cleanup
        arcpy.ClearWorkspaceCache_management()
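A natural follow-up, sketched here as an assumption rather than part of the original script, is to delete the unused domains by name with the DeleteDomain tool:

# Hypothetical cleanup step: DeleteDomain errors if a domain is still in
# use, so the set difference computed above is what makes this safe to try.
for dom in domainsUnused:
    arcpy.DeleteDomain_management(myGDB, dom.name)
    print "Deleted domain {}".format(dom.name)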
Example 20
def sde2gdb(sde_connection,featureclass_name,output_path,fgdb_name,outFCname):
    #establish sde connection workspace
    arcpy.env.workspace = sde_connection
    # if the geodatabase already exists, delete it
    if (os.path.isdir(output_path+'\\'+fgdb_name+'.gdb')== True):
        shutil.rmtree(output_path+'\\'+fgdb_name+'.gdb')
    #create a version 10 FileGDB. Note Selfservice only supports FileGDB <10.3
    arcpy.CreateFileGDB_management(output_path, fgdb_name,"10.0")
    #Export the sde featureclass to the filegdb
    arcpy.FeatureClassToFeatureClass_conversion(sde_connection+'//'+featureclass_name, 
                                            output_path+'\\'+fgdb_name+'.gdb', 
                                            outFCname)
    #drop SDE connection 
    arcpy.ClearWorkspaceCache_management()
    return output_path+'\\'+fgdb_name+'.gdb'
Example 21
def sde2shp(sde_connection, featureclass_name, output_path, outputSHPname):
    #establish sde connection workspace
    arcpy.env.workspace = sde_connection
    # if the shapefile already exists, delete it
    if arcpy.Exists(output_path + '//' + outputSHPname):
        arcpy.Delete_management(output_path + '//' + outputSHPname)
    #Export the sde featureclass to the esri shapefile
    arcpy.FeatureClassToFeatureClass_conversion(featureclass_name, output_path,
                                                outputSHPname)
    #drop SDE connection
    arcpy.ClearWorkspaceCache_management()
    return output_path + '\\' + outputSHPname


# Test function calls

#test sde2shp <SDE_Connection> <SDE_FC_Name> <output_path> <shapefile_Name>
#print sde2shp(r'C:\AARON_DATA\Connection to PEAS71 - DISS - SDE.sde','GDB.W_IMAGERY_METADATA',r'C:\AARON_DATA','LGATE071.shp')
Example 22
def extract(xmlFileName, rowLimit, workspace, source, target, datasetType):

    xmlDoc = dla.getXmlDoc(xmlFileName)
    if workspace == "" or workspace == "#" or workspace is None:
        dla.workspace = dla.setWorkspace()
    else:
        dla.workspace = workspace
    fields = dla.getFields(xmlFileName)
    success = True
    name = ''
    try:
        if not arcpy.Exists(dla.workspace):
            dla.addMessage(dla.workspace +
                           " does not exist, attempting to create")
            dla.createGeodatabase()
        if len(fields) > 0:
            arcpy.SetProgressor("step", "Importing Layer...", 0, 1, 1)

            targetName = dla.getDatasetName(target)
            sourceName = dla.getDatasetName(source)
            arcpy.SetProgressorLabel("Loading " + sourceName + " to " +
                                     targetName + "...")

            if not arcpy.Exists(source):
                dla.addError("Layer " + source + " does not exist, exiting")
                return

            retVal = exportDataset(xmlDoc, source, dla.workspace, targetName,
                                   rowLimit, datasetType)
            if retVal == False:
                success = False

        arcpy.SetProgressorPosition()
    except:
        dla.addError("A Fatal Error occurred")
        dla.showTraceback()
        success = False
    finally:
        arcpy.ResetProgressor()
        #arcpy.RefreshCatalog(dla.workspace)
        arcpy.ClearWorkspaceCache_management(dla.workspace)

    return success
Example 23
def cleanUp(twrw_places, gdb, final_file, final_gdb_loc, delete_fields):
    deleteFeatureClass(final_file, final_gdb_loc)

    ap.FeatureClassToFeatureClass_conversion(twrw_places, ap.env.workspace,
                                             final_file)

    for field in delete_fields:
        ap.DeleteField_management(final_file, field)
        print("---------------------------")
        print(field + " DELETED")
        print("---------------------------")

    print("Minority_Final feature class created - Script Complete!!!")

    ap.ClearWorkspaceCache_management()

    # CREATE FINAL FEATURE CLASS
    ap.FeatureClassToFeatureClass_conversion(final_file, final_gdb_loc,
                                             final_file)
    print("---------------------------")
Example 24
def cleanup(directory, gdb_path):
    """Attempts to delete directory and all files within it."""
    # Delete temporary layers and temporary file geodatabase
    print('Deleting temporary layers...')
    arcpy.Delete_management(['parcel_lyr', 'final_lyr', 'lyr_final_parcel'])
    print('Deleting temporary feature classes and tables...')
    arcpy.Delete_management([
        'fc_orig_parcel', 'fc_proj_parcel', 'bsa_export', 'join_table',
        'fc_join_parcel', 'fc_ordered_parcel'
    ])
    arcpy.ClearWorkspaceCache_management()
    print('Deleting temporary geodatabase...')
    arcpy.Delete_management(gdb_path)

    try:
        shutil.rmtree(directory)
        print('All temporary files deleted...')
    except OSError as e:
        print("Error: %s : %s" % (directory, e.strerror))

    return
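An illustrative call (both paths are hypothetical, not from the original script):

cleanup(r'C:\Temp\parcel_staging', r'C:\Temp\parcel_staging\scratch.gdb')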
Example 25
def sde2gdb(sde_connection, featureclass_name, output_path, fgdb_name,
            outFCname):
    #establish sde connection workspace
    arcpy.env.workspace = sde_connection
    # if the geodatabase already exists, delete it
    if (os.path.isdir(output_path + '\\' + fgdb_name + '.gdb') == True):
        shutil.rmtree(output_path + '\\' + fgdb_name + '.gdb')
    #create a version 10 FileGDB. Note Selfservice only supports FileGDB <10.3
    arcpy.CreateFileGDB_management(output_path, fgdb_name, "10.0")
    #Export the sde featureclass to the filegdb
    arcpy.FeatureClassToFeatureClass_conversion(
        sde_connection + '//' + featureclass_name,
        output_path + '\\' + fgdb_name + '.gdb', outFCname)
    #drop SDE connection
    arcpy.ClearWorkspaceCache_management()
    return output_path + '\\' + fgdb_name + '.gdb'


# Test function calls
#test sde2fgdb <SDE_Connection> <SDE_FC_Name> <path_to_output_fGDB> <fGDB_Name> <Output_FC_Name>
#print sde2fgdb(r'C:\AARON_DATA\Connection to PEAS71 - DISS - SDE.sde','GDB.W_IMAGERY_METADATA',r'C:\AARON_DATA','mygdb.gdb','LGATE71')
Example 26
def getfcIn_blanktp(args1, args2):

    tp_fcIn = args1
    tp_fcOut = args2

    arcpy.env.workspace = 'C:/Users/JDZ/Documents/ArcGIS/Default.gdb'

    if '37' in tp_fcIn:

        arcpy.CopyFeatures_management('blank_37', tp_fcOut)

    elif '38' in tp_fcIn:

        arcpy.CopyFeatures_management('blank_38', tp_fcOut)

    elif '39' in tp_fcIn:

        arcpy.CopyFeatures_management('blank_39', tp_fcOut)

    arcpy.ClearWorkspaceCache_management()

    return tp_fcOut
Example 27
def push_to_gdb(final_lyr, gis, webgis_config, gis_env_config, services_list):
    """
    Copies the finalized layer to a geodatabase. The feature class will be reprojected, if specified in the config
    file. If a feature service is referencing the feature class, it will be stopped prior to copying features and
    restarted afterwards.
    """

    # Output final_lyr to target geodatabase feature class
    print('Copying features to target geodatabase...')
    arcpy.CopyFeatures_management(final_lyr, gis_env_config['out_fc_name'])

    # Assign domains to fields
    print('Assigning domains to fields...')
    field_domain_lst = [['classcode', 'taxClassDESCR'],
                        ['schooltaxcode', 'taxSchoolDESCR']]
    for domain in field_domain_lst:
        arcpy.AssignDomainToField_management(gis_env_config['out_fc_name'],
                                             domain[0], domain[1])

    print('Altering ObjectID alias...')
    arcpy.AlterField_management(gis_env_config['out_fc_name'],
                                'OBJECTID',
                                new_field_alias='OBJECTID')

    # Clear environment workspace cache
    arcpy.ClearWorkspaceCache_management()

    # Restart feature services
    print('Starting feature services...')
    for serv in services_list:
        print('Starting {} feature service'.format(
            serv.properties.serviceName))
        serv.start()

    # Update the Portal item summary
    print('Updating feature service summary...')
    update_summary(gis, webgis_config['portal_item'])
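The docstring mentions stopping the referencing services before the copy; that step falls outside this excerpt, but with the same service objects used in the restart loop it would plausibly look like this sketch:

# Hypothetical counterpart to the restart loop above: stop each
# referencing feature service before overwriting the feature class.
print('Stopping feature services...')
for serv in services_list:
    print('Stopping {} feature service'.format(serv.properties.serviceName))
    serv.stop()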
Example 28
def main(workspace, datatypes, exceptionlist):
    """
    Workflow to register the datasets, feature classes and tables as versioned
    :param workspace:
    :param datatypes:
    :param exceptionlist:
    :return:
    """
    arcpy.env.workspace = workspace

    # Check if version of geodatabase and desktop match, if not, show message
    licence_check(workspace)

    # Create a list of all datasets, feature classes and (depending on datatypes) tables
    ds_list = []
    ds_list.extend(create_datasets_list())
    ds_list.extend(create_featureclasses_list())
    ds_list.extend(create_tables_list(datatypes))

    # Delete the Workspace Cache for more info check
    # https://pro.arcgis.com/de/pro-app/tool-reference/data-management/clear-workspace-cache.htm
    arcpy.ClearWorkspaceCache_management()

    register_db_as_versioned(ds_list, exceptionlist)
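register_db_as_versioned is not shown; assuming it wraps the standard RegisterAsVersioned tool, a minimal sketch could be:

# Hypothetical body: skip names on the exception list, register the rest
# without moving edits to the base tables.
def register_db_as_versioned(ds_list, exceptionlist):
    for ds in ds_list:
        if ds in exceptionlist:
            continue
        arcpy.RegisterAsVersioned_management(ds, "NO_EDITS_TO_BASE")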
Example 29
def createComposite(info):
    arcpy.env.workspace = info['workspace']

    in_address_locators = info['in_address_locators']
    in_field_map = info['in_field_map']
    in_selection_criteria = info['in_selection_criteria']
    out_composite_address_locator = info['out_composite_address_locator']

    print "Creating the composite locator: " + out_composite_address_locator + "."
    try:

        arcpy.CreateCompositeAddressLocator_geocoding(
            in_address_locators, in_field_map, in_selection_criteria,
            out_composite_address_locator)
        print "Succcesfully Created the composite locator: " + out_composite_address_locator + "!"
    except:
        print 'Error rebuilding composite geocoder: ' + out_composite_address_locator + '.'
        print arcpy.GetMessages(2)
        logger.error('Error rebuilding composite geocoder: ' +
                     out_composite_address_locator + '.')
        logger.error(arcpy.GetMessages(2))

    arcpy.ClearWorkspaceCache_management()
    arcpy.env.workspace = ""
Example 30
senior_file = "X01_AGE_AND_SEX"
race_file = "X02_RACE"
hisp_file = "X03_HISPANIC_OR_LATINO_ORIGIN"
commute_file = "X08_COMMUTING"
lep_file = "X16_LANGUAGE_SPOKEN_AT_HOME"
pov_file = "X17_POVERTY"
inc_file = "X19_INCOME"

# # DELETE AND CREATE NEW FINAL_{year}.gdb

# # FINAL GDB
final_gdb = f'Final_{year}.gdb'
final_gdb_loc = os.path.join(root_dir, final_gdb)

# ! briefly -- why is this here?
ap.ClearWorkspaceCache_management()

# # Call each of the major functions to create feature classes with
# # statistics for each of the demographics outlined in each function.

# replace the output GDB with an empty one at the start
# replaceGDB(root_dir, final_gdb) # from helpers.py

# medHHInc(year, root_dir, bg_mergegdb, region, places, bg_file, inc_file, final_gdb_loc)

# senior(year, root_dir, bg_mergegdb, region, places, bg_file, senior_file, final_gdb_loc)

# poverty(year, root_dir, bg_mergegdb, region, places, bg_file, pov_file, final_gdb_loc)

# ! why does lep use the census key and the other functions do not?
lep(year, root_dir, bg_mergegdb, region, places, bg_file, lep_file,