Exemplo n.º 1
0
def recalculate_extent(fc):
    """Refresh the stored extent of a feature class.

    With an ArcInfo or ArcEditor license the extent is recalculated
    directly.  Without one, the same effect is obtained by compressing
    and immediately uncompressing the file-geodatabase data, which
    forces the extent to be rebuilt as a side effect.
    """
    licensed = any(
        arcpy.CheckProduct(level) == "Available"
        for level in ("ArcInfo", "ArcEditor")
    )
    if licensed:
        arcpy.RecalculateFeatureClassExtent_management(fc)
    else:
        arcpy.CompressFileGeodatabaseData_management(fc)
        arcpy.UncompressFileGeodatabaseData_management(fc)
Exemplo n.º 2
0
        #             print ee
        # #
        if fcList:
            for lmifc in fcList:
                destfc = lmifc[lmifc.find('.') + 1:]
                totalfcdest = totaldest + os.sep + destfc
                print lmifc + " ---->  " + totalfcdest
                try:
                    arcpy.Copy_management(lmifc, totalfcdest)
                except arcpy.ExecuteError as ee:
                    print ee
                    # arcpy.Copy_management(lmifc, totalfcdest+"_1")

        print "Compressing : " + fileGeoDbLocation + "\\" + fileGeoDb
        # 压缩一下文件地理数据库
        arcpy.CompressFileGeodatabaseData_management(fileGeoDbLocation + "\\" +
                                                     fileGeoDb)

    elapsed = (time.clock() - start)
    print "Total time : " + str(elapsed) + " seconds"
    print "or : " + str(elapsed / 60) + " minutes"
    print "or : " + str(elapsed / 3600) + " hours"
    resultFile = open(basedir + "result.txt", "w")
    resultFile.write("yes")
    resultFile.close()

except Exception as e:
    print e
    exceptFile = open(basedir + "exception.txt", "w")
    resultFile = open(basedir + "result.txt", "w")
    resultFile.write("no")
    resultFile.close()
Exemplo n.º 3
0
class CreerCopieBaseDonneesFGDB(object):
    #*******************************************************************************************
    """
    Create a copy of a spatial database inside a file geodatabase (FGDB).

    Tables and feature classes belonging to a given owner are listed from
    the source database and copied, one by one, into a (new or existing)
    FGDB, optionally compressing the FGDB at the end.
    """

    #-------------------------------------------------------------------------------------
    def __init__(self):
        #-------------------------------------------------------------------------------------
        """
        Initialize the processing used to copy a spatial database into a FGDB.

        Parameters:
        -----------
        None

        Variables:
        ----------
        None

        """

        #Nothing to initialize
        return

    #-------------------------------------------------------------------------------------
    def validerParamObligatoire(self, database, repertoire, fgdb,
                                proprietaire):
        #-------------------------------------------------------------------------------------
        """
        Validate that every mandatory parameter is present (non-empty).

        Parameters:
        -----------
        database        : Name of the database whose tables and feature classes are copied into a FGDB.
        repertoire      : Name of the directory in which the FGDB must be created.
        fgdb            : Name of the FGDB to create.
        proprietaire    : Name of the owner of the tables or feature classes to copy into the FGDB.

        Returns:
        -------
        Raises an Exception if a mandatory parameter is missing.
        """

        #Display a progress message (user-facing text kept in French)
        arcpy.AddMessage(
            "- Vérification de la présence des paramètres obligatoires")

        if (len(database) == 0):
            raise Exception("Paramètre obligatoire manquant: %s" % 'database')

        if (len(repertoire) == 0):
            raise Exception("Paramètre obligatoire manquant: %s" %
                            'repertoire')

        if (len(fgdb) == 0):
            raise Exception("Paramètre obligatoire manquant: %s" % 'fgdb')

        if (len(proprietaire) == 0):
            raise Exception("Paramètre obligatoire manquant: %s" %
                            'proprietaire')

        #Exit
        return

    #-------------------------------------------------------------------------------------
    def executer(self, database, repertoire, fgdb, proprietaire, compression):
        #-------------------------------------------------------------------------------------
        """
        Run the processing that copies a spatial database into a FGDB.

        Parameters:
        -----------
        database        : Name of the database whose tables and feature classes are copied into a FGDB.
        repertoire      : Name of the directory in which the FGDB must be created.
        fgdb            : Name of the FGDB to create.
        proprietaire    : Name of the owner of the tables or feature classes to copy into the FGDB.
        compression     : Whether the FGDB must be compressed afterwards (True) or not (False).

        Variables:
        ----------

        """

        #Make the source database the default workspace for the List* calls
        arcpy.env.workspace = database

        #List the tables owned by the given owner
        listeTables = arcpy.ListTables(proprietaire + "*")

        #List the feature classes owned by the given owner
        listeFeatureClass = arcpy.ListFeatureClasses(proprietaire + "*")

        #Reuse the FGDB if it already exists
        if arcpy.Exists(repertoire + "\\" + fgdb + ".gdb"):
            #Warn and point fgdb at the existing geodatabase
            arcpy.AddWarning("La FGDB existe déjà !")
            fgdb = repertoire + "\\" + fgdb + ".gdb"
        #Otherwise create it
        else:
            arcpy.AddMessage(" ")
            #CreateFileGDB_management returns a Result; str() yields its path
            fgdb = str(arcpy.CreateFileGDB_management(repertoire, fgdb))
            arcpy.AddMessage(arcpy.GetMessages())

        #Copy every table from the database into the FGDB
        for table in listeTables:
            arcpy.AddMessage(" ")
            arcpy.AddMessage("Executing: TableToTable " + table + " " + fgdb +
                             " " + table.replace(proprietaire + ".", ""))
            #Skip tables that already exist in the FGDB
            if arcpy.Exists(fgdb + "\\" +
                            table.replace(proprietaire + ".", "")):
                arcpy.AddWarning("La table existe déjà !")
            #Table does not exist yet: copy it
            else:
                try:
                    #Copy the table into the FGDB, stripping the owner prefix
                    arcpy.TableToTable_conversion(
                        table, fgdb, table.replace(proprietaire + ".", ""))
                    #Relay every geoprocessing message except the first
                    for i in range(1, arcpy.GetMessageCount()):
                        arcpy.AddMessage(arcpy.GetMessage(i))

                #BUGFIX: was "except Exception, err" — Python-2-only syntax
                #whose binding was never used; plain "except Exception" is
                #equivalent and valid on both Python 2 and 3.
                except Exception:
                    #Relay every geoprocessing message except the first as an error
                    for i in range(1, arcpy.GetMessageCount()):
                        arcpy.AddError(arcpy.GetMessage(i))

        #Copy every feature class from the database into the FGDB
        #NOTE(review): unlike the table loop above, conversion errors here are
        #NOT caught and will abort the run — confirm whether that is intended.
        for featureClass in listeFeatureClass:
            arcpy.AddMessage(" ")
            arcpy.AddMessage("Executing: FeatureClassToFeatureClass " +
                             featureClass + " " + fgdb + " " +
                             featureClass.replace(proprietaire + ".", ""))
            #Skip feature classes that already exist in the FGDB
            if arcpy.Exists(fgdb + "\\" +
                            featureClass.replace(proprietaire + ".", "")):
                arcpy.AddWarning("La featureClass existe déjà !")
            #Feature class does not exist yet: copy it
            else:
                arcpy.FeatureClassToFeatureClass_conversion(
                    featureClass, fgdb,
                    featureClass.replace(proprietaire + ".", ""))
                #Relay every geoprocessing message except the first
                for i in range(1, arcpy.GetMessageCount()):
                    arcpy.AddMessage(arcpy.GetMessage(i))

        #Optionally compress the FGDB
        if compression:
            arcpy.AddMessage(" ")
            arcpy.AddMessage(
                "Executing: CompressFileGeodatabaseData_management " + fgdb)
            arcpy.CompressFileGeodatabaseData_management(fgdb)
            arcpy.AddMessage(arcpy.GetMessages())

        #Exit
        arcpy.AddMessage(" ")
        return
Exemplo n.º 4
0

def doSimplify(input):
    """Simplify a polygon feature class into To_GDB, skipping existing outputs."""
    output = os.path.join(To_GDB, os.path.basename(input))
    print("{} => {}".format(input, output))
    if arcpy.Exists(output):
        return
    arcpy.SimplifyPolygon_cartography(input, output, "POINT_REMOVE",
                                      "10 Meters", "0 SquareMeters",
                                      "NO_CHECK", "NO_KEEP")


def doCopy(input):
    """Copy a feature class unchanged into To_GDB, skipping existing outputs."""
    output = os.path.join(To_GDB, os.path.basename(input))
    print("{} => {}".format(input, output))
    if arcpy.Exists(output):
        return
    arcpy.FeatureClassToFeatureClass_conversion(input, To_GDB,
                                                os.path.basename(input),
                                                "", "", "")


# Driver: load each configured feature list into To_GDB.
# SIMPLIFY_FEATURES, NON_SENSITIVE_FEATURES, PROVINCIAL_FEATURES and To_GDB
# are defined elsewhere in the file.

# These layers are geometrically simplified before export.
for f in SIMPLIFY_FEATURES:
    doSimplify(f)

# These layers are copied as-is.
for f in NON_SENSITIVE_FEATURES:
    doCopy(f)

for f in PROVINCIAL_FEATURES:
    doCopy(f)

# Shrink the output file geodatabase once all layers are loaded.
arcpy.CompressFileGeodatabaseData_management(To_GDB)
Exemplo n.º 5
0
def runapp(tfl_submit_edits):
    """Apply submitted TFL edits to the staging data and archive the package.

    Workflow: validate tool parameters, connect to the BCGW, verify the edit
    geodatabase is lock-free, determine which datasets changed since the
    extract date, apply each update (overview, agreement, additions,
    deletions, Schedule A), stamp the submitter, copy intersecting cadastre,
    compress the gdb, then move the review folder to final.

    NOTE(review): relies on module-level names defined elsewhere in this file
    (bcgw_uname, bcgw_pw, input_gdb, change_history, change_type,
    tfl_component, geobc, and the update_* / check_* helpers).
    """

    #First check input parameter requirements - because optional parameters from tool may be required, they cannot be set in tool
    if check_input_parameters():

        #establish the BCGW connection to check the schedule A AND TFL Boundary
        BCGWConnection = get_bcgw_connection(bcgw_uname, bcgw_pw)

        #only proceed if connection succeeds
        #IDIOM FIX: was "if not BCGWConnection is False:"; "is not" is the
        #PEP 8 spelling and behaves identically.
        if BCGWConnection is not False:

            # Call check_for_locks method to check the edit gdb
            gdb_object = geobc.GDBInfo(input_gdb)
            lock_owner_list = gdb_object.check_for_locks()
            if not lock_owner_list:
                arcpy.env.workspace = input_gdb

                #Get the extract date - used for the change detection and for the reviewer update
                #add reference
                history_table = geobc.TableInfo()
                check_out_date = history_table.get_last_date(
                    change_history, 'Date_Extracted')

                #Check all TFL Poly features and edit date on schedule a - make list of datasets to update
                datasets_to_update = get_update_list(check_out_date,
                                                     BCGWConnection)

                #Check for locks on all the output datasets to update - only proceed if clear
                if not check_outputs_for_locks(datasets_to_update):
                    #first uncompress the gdb, needed for updating submitter and adding intersecting cadastre
                    arcpy.UncompressFileGeodatabaseData_management(input_gdb)

                    #Remove the previous TFL from the TFL overview and add the updated one
                    update_tfl_overview()

                    if 'Replacement' in datasets_to_update:
                        update_tfl_agreement()

                    if 'Addition' in datasets_to_update:
                        #if a single instrument, first set skey on the local data based on the inputs - also updates input fields
                        if change_type == 'Instrument - Single':
                            if tfl_component == 'Schedule A':
                                update_skey('Addition', 831)
                            else:
                                update_skey('Addition', 832)
                        update_tfl_addition()

                    if 'Deletion' in datasets_to_update:
                        #if a single instrument, first set skey on the local data based on the inputs
                        if change_type == 'Instrument - Single':
                            if tfl_component == 'Schedule A':
                                update_skey('Deletion', 835)
                            else:
                                update_skey('Deletion', 836)
                        update_tfl_deletion()

                    if 'Schedule_A' in datasets_to_update:
                        update_tfl_schedule_a()

                    #Update the change history with the submitter and datestamp
                    check_out_date = check_out_date.strftime(
                        "%b %d %Y %H:%M:%S"
                    )  #format to string for use in query
                    update_submitter(check_out_date)

                    #Create copy of cadastre that intersects TFL lines and add to final folder
                    intersect_cadastre(BCGWConnection, datasets_to_update,
                                       check_out_date)

                    #Compress the database now to prevent accidental edits
                    arcpy.CompressFileGeodatabaseData_management(input_gdb)

                    #Move the review package folder to the new final and rename it with datestamp
                    move_and_archive()
                    if change_type == 'Instrument - Multiple':
                        arcpy.AddMessage(
                            'CAUTION: Submitted multiple instruments to local staging folders SKEY MUST be populated prior to moving to DataBC Staging'
                        )
                    arcpy.AddMessage(
                        'Script run complete - check messages and outputs')

                else:
                    print('locks found on output')
                    arcpy.AddMessage(
                        'Locks found on output - check and re-run')

            else:
                print(lock_owner_list)
                arcpy.AddWarning('Found lock on geodatabase: ' +
                                 str(lock_owner_list))
        else:
            arcpy.AddWarning(
                'Error making BCGW connection - check credentials and re-try')
Exemplo n.º 6
0
    def execute(self):
        """Rebuild a zoned feature class in a temporary FGDB and export it.

        Steps, all driven by ``self.conf``: delete any previous temporary
        FGDB, create a fresh one, create a polygon feature class from a
        shapefile template, append the source feature class into it,
        stamp the ZONA and ULT_ALTE fields, then export the result to
        ``out_feature_path``.  Errors are reported through arcpy messages.
        """

        env.workspace = self.conf.ws

        # Pull every path/name from the configuration object up front.
        fgdb_name = self.conf.fgdb_name
        fgdb_file = self.conf.fgdb_file
        fc_name = self.conf.fc_name
        fc_file = self.conf.fc_file

        out_path = self.conf.out_path
        fgdb_name_temp = self.conf.fgdb_name_temp
        fgdb_file_tmp = self.conf.fgdb_file_tmp

        fc_name_temp = self.conf.fc_name_temp
        fc_file_temp = self.conf.fc_file_temp

        out_feature_name = self.conf.out_feature_name
        out_feature_path = self.conf.out_feature_path

        lossless = self.conf.lossless

        try:
            # Remove any leftover temporary FGDB from a previous run.
            # NOTE(review): compressing immediately before deleting looks
            # redundant — confirm whether the compress step is needed.
            if arcpy.Exists(fgdb_file_tmp):
                arcpy.CompressFileGeodatabaseData_management(
                    fgdb_file_tmp, lossless)
                arcpy.Delete_management(fgdb_file_tmp)

            # Process: Create File GDB
            arcpy.CreateFileGDB_management(out_path, fgdb_name_temp, 'CURRENT')

            # Process: Create Feature Class
            # The schema comes from a template shapefile; the spatial
            # reference is WGS 84 spelled out as a literal string.
            template_name = 'TEMPLATE.shp'
            template_file = os.path.join(out_path, template_name)
            coordinate_system = "GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984'," \
                                "SPHEROID['WGS_1984',6378137.0,298.257223563]]," \
                                "PRIMEM['Greenwich',0.0],UNIT['Degree'," \
                                "0.0174532925199433]];" \
                                "-400 -400 1000000000;-100000 10000;" \
                                "-100000 10000;" \
                                "8,98315284119522E-09;0,001;0,001;IsHighPrecision "

            arcpy.CreateFeatureclass_management(fgdb_file_tmp, fc_name_temp,
                                                "POLYGON", template_file,
                                                "DISABLED", "DISABLED",
                                                coordinate_system, "", "0",
                                                "0", "0")

            # Process: Append
            field_mappings = arcpy.FieldMappings()
            # Add target dataset
            field_mappings.addTable(os.path.join(fgdb_file_tmp, fc_name_temp))
            # Add append dataset
            field_mappings.addTable(fc_file)

            # NOTE(review): field_mappings is built but never passed to
            # Append_management below — confirm whether that is intended.
            arcpy.Append_management(fc_file,
                                    os.path.join(fgdb_file_tmp, fc_name_temp),
                                    "NO_TEST", "", "")

            # Process: Calculate Field — hard-code ZONA to '01'.
            field_name = "ZONA"
            exp_value = "valor()"
            block_value = "def valor(): return '01'"
            lang_value = "PYTHON_9.3"

            arcpy.CalculateField_management(in_table=fc_file_temp,
                                            field=field_name,
                                            expression=exp_value,
                                            expression_type=lang_value,
                                            code_block=block_value)

            # Stamp ULT_ALTE with the current local timestamp.
            field_name = "ULT_ALTE"
            exp_value = "ts()"
            lang_value = "PYTHON_9.3"
            # NOTE(review): "\\n" puts a literal backslash-n (not a newline)
            # into the code block handed to CalculateField — verify arcpy
            # actually accepts this as a multi-line function definition.
            block_value = "def ts():\\n import time\\n return time.strftime(" \
                          "\"%Y-%m-%d %H:%M:%S\", time.localtime())"

            arcpy.CalculateField_management(in_table=fc_file_temp,
                                            field=field_name,
                                            expression=exp_value,
                                            expression_type=lang_value,
                                            code_block=block_value)

            # Process: Create Feature Class — replace any previous output,
            # then export the temporary feature class to the final location.
            if arcpy.Exists(out_feature_path):
                arcpy.Delete_management(out_feature_path)

            arcpy.FeatureClassToFeatureClass_conversion(
                os.path.join(fgdb_file_tmp, fc_name_temp), out_path,
                out_feature_name)

        except Exception:
            # Report the failure through arcpy (message text is user-facing,
            # kept in Portuguese).
            arcpy.AddMessage('Erro ao processar.')
            e = sys.exc_info()[1]
            arcpy.AddError(e.args[0])
        else:
            arcpy.AddMessage('Finalizado com sucesso!')
def runapp(tfl_set_to_pending):
    """Promote a reviewed TFL package from the Review area to Pending.

    Only proceeds when all three confirmation checks are set, the edit
    geodatabase is lock-free, the reviewer is not the original editor,
    and domain validation reports no attribute errors.  On success the
    gdb is compressed and the whole folder is copied to the pending
    area, then removed from the review area.

    NOTE(review): relies on module-level names defined elsewhere in the
    file (check_1..check_3, input_gdb, input_folder, input_tfl, test,
    workspace, geobc, TFL_PENDING_FOLDERS, and the helper functions).
    """

    #Confirm that user marked all checks as complete
    if check_1 and check_2 and check_3:

        arcpy.AddMessage('dataset to move is ' + input_gdb)

        # Call check_for_locks method to check the gdb
        gdb_object = geobc.GDBInfo(input_gdb)
        lock_owner_list = gdb_object.check_for_locks()
        if not lock_owner_list:

            #Check to ensure the reviewer running the tool is not the user who extracted the TFL (assumed to be the editor)
            user = getpass.getuser()
            if test:
                # NOTE(review): '******' looks like a scrubbed placeholder
                # for a test username — confirm the intended value.
                user = '******'

            edit_user = get_editor()
            # Compact to release any self-locks created while reading the editor.
            arcpy.Compact_management(input_gdb)
            lock_owner_list = gdb_object.check_for_locks()
            arcpy.AddMessage('Check for locks after get editor: ' +
                             str(lock_owner_list))
            if not user == edit_user:

                # Validate coded-domain values before promoting anything.
                errors = coded_domain_validation.validate_domains(
                    input_gdb, workspace)

                if not errors:
                    arcpy.AddMessage('\nNo attribute value errors found')

                    #Remove the topologies
                    remove_topology_and_active_lines(workspace)
                    arcpy.Compact_management(input_gdb)
                    lock_owner_list = gdb_object.check_for_locks()
                    arcpy.AddMessage(
                        'Check for locks after remove topology: ' +
                        str(lock_owner_list))

                    #Set the reviewer
                    set_reviewer(user)
                    #Move the required entities to the pending area
                    arcpy.Compact_management(
                        input_gdb)  #compact to remove any self-locks
                    gdb_object = geobc.GDBInfo(input_gdb)
                    lock_owner_list = gdb_object.check_for_locks()
                    arcpy.AddMessage(
                        'Check for locks after setting reviewer ' +
                        str(lock_owner_list))
                    arcpy.CompressFileGeodatabaseData_management(
                        input_gdb
                    )  #compress gdb to prevent changes from here to final
                    if not lock_owner_list:
                        try:
                            # Copy the whole review folder into the pending area.
                            shutil.copytree(
                                input_folder,
                                TFL_PENDING_FOLDERS + os.sep + input_tfl)
                            #message user and end script
                            arcpy.AddMessage(
                                'Copied - TFL folder to 4_TFL_Pending folder')
                            try:
                                # Best-effort cleanup of the review copy; a
                                # failure only warns, it does not roll back.
                                shutil.rmtree(input_folder)
                                arcpy.AddMessage(
                                    'Deleted- TFL folder from 3_TFL_Review folder'
                                )
                            except:
                                arcpy.AddWarning(
                                    'WARNING: Unable to delete entire folder after copy - please check that Pending folder is complete then close all files and delete Review folder'
                                )
                        except:
                            arcpy.AddWarning(
                                'WARNING: Unable to copy entire folder - please delete the folder and all contents in 3_TFL_Review. Then be sure all files are closed before trying again'
                            )
                    else:
                        arcpy.AddWarning('Found lock on geodatabase: ' +
                                         str(lock_owner_list))
            else:  #Message the user that the editor cannot review and promote the data
                arcpy.AddWarning(
                    'Review cannot be completed by the same user that ran edit tools'
                )

        else:
            arcpy.AddWarning('WARNING: Found lock on geodatabase: ' +
                             str(lock_owner_list))
    else:
        arcpy.AddWarning(
            'WARNING: Tool cannot be run until all checks have been confirmed')