def runapp(move_to_working):

    if input_main_folder == 'Review':
        input_folder = TFL_REVIEW_FOLDERS + os.sep + input_tfl
    else:
        input_folder = TFL_PENDING_FOLDERS + os.sep + input_tfl

    input_gdb = input_folder + os.sep +'Data' + os.sep + 'FADM_' + tfl_number + '.gdb'

    arcpy.AddMessage('input folder to move is: ' + input_folder)

    # Call check_for_locks method to check the gdb - compact first to address self-locks
    arcpy.Compact_management(input_gdb)
    gdb_object = geobc.GDBInfo(input_gdb)
    lock_owner_list = gdb_object.check_for_locks()
    if not lock_owner_list:
        arcpy.UncompressFileGeodatabaseData_management(input_gdb) #If the GDB is in pending - it is probably compressed
        #Move the required entities to the pending area
        try:
            shutil.copytree(input_folder,TFL_WORKING_FOLDERS + os.sep + input_tfl)
            #message user and end script
            arcpy.AddMessage('TFL folder has been copied to 2_TFL_Working folder')
            try:
                shutil.rmtree(input_folder)
                arcpy.AddMessage('TFL folder has been deleted from ' + input_main_folder)
            except:
                arcpy.AddWarning('Failed to delete entire contents from ' + input_main_folder + '. Please check that all files are closed and manually delete.')
        except:
            arcpy.AddWarning('Unable to fully copy folder - please check to ensure no files are open, delete the contents in TFL working if any, and try again')
    else:
        arcpy.AddWarning('WARNING: Found lock on geodatabase: ' + str(lock_owner_list) + '. Clear locks before trying again')
Example #2
    def process(self):
        for database in [
                self.sgid_local, self.sgid_network_drive,
                self.basemapdata_remote, self.basemapdata
        ]:
            self.log.info('compacting {}'.format(database))
            arcpy.Compact_management(database)
def clearWSLocks(inputWS):
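  # the Exists / Compact / Exists sequence below is a common trick to make arcpy release its own schema locks on the workspace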
  if not all([arcpy.Exists(inputWS), arcpy.Compact_management(inputWS), arcpy.Exists(inputWS)]):
    print 'Error with Workspace (%s), killing script...' % inputWS
    exit()
  else:
    print "No locks"
    return True
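
A minimal usage sketch for the helper above (the geodatabase path and feature class name are hypothetical, for illustration only): clear this process's own locks before attempting an operation that needs exclusive access.

import arcpy

scratch_gdb = r'C:\temp\scratch.gdb'  # hypothetical file geodatabase
if clearWSLocks(scratch_gdb):
    # locks were cleared, so a schema-changing operation should not fail with a lock error
    arcpy.Delete_management(scratch_gdb + r'\old_output_fc')  # hypothetical feature class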
Example #4
def compactGDB(wspace):
    try:
        arcpy.Compact_management (in_workspace=wspace)
        print "Success: compacted " + wspace
    except Exception as err:
        print err
        sendEmail(err)
Example #5
    def tearDown(self):
        arcpy.env.workspace = self.temp_data_folder
        done = True
        # delete all datasets
        datasets = arcpy.ListDatasets()
        for dataset in datasets:
            arcpy.Delete_management(dataset)

        # delete all workspaces
        workspaces = arcpy.ListWorkspaces()
        for workspace in workspaces:

            #clear all locks
            arcpy.Exists(workspace)
            arcpy.Compact_management(workspace)
            arcpy.Exists(workspace)
            try:
                arcpy.Delete_management(workspace)
            except arcpy.ExecuteError:
                print("cannot delete {0} due to lock".format(workspace))
                done = False

        # delete directory with all remaining files
        if done:
            distutils.dir_util.remove_tree(self.temp_data_folder)
Example #6
    def executer(self, workspace, classe):
    #-------------------------------------------------------------------------------------
        """
        Exécuter le traitement pour détruire les domaines d'attributs codés dans une géodatabase.
        
        Paramètres:
        -----------
        workspace   : Nom de la géodatabase ou les domaines seront créés.
        classe      : Liste des noms de classe contenus dans la géodatabase utilisés pour créer les domaines.
        
        Variables:
        ----------
        Aucune
        """
        
        #Vérification de la présence des paramètres obligatoires
        self.validerParamObligatoire(workspace, classe)
       
        #Valider les valeurs des paramètres
        self.validerValeurParam(workspace)

        #Détruire tous les domaines existants
        self.detruireDomaineAttribut(workspace, classe)
        
        #Vérifier si le type de Géodatabase est LocalDatabase 
        if arcpy.Describe(workspace).workspaceType == "LocalDatabase":
            #Afficher le message
            arcpy.AddMessage("Compact_management('" + workspace + "')")
            #Compacter la Géobase
            arcpy.Compact_management(workspace)
        
        #Sortir
        return
Example #7
    def compactFileGeoDatabase(self):

        compact_interval_days = self.options.get('compact_interval_days', None)
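        # compact only when the current UTC day-of-month is evenly divisible by the configured interval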
        compact_ready = (compact_interval_days
                         and datetime.utcnow().day % compact_interval_days == 0)

        if compact_ready:
            arcpy.Compact_management(self.fullpath)
Example #8
def clear(inputWS):
    '''Attempts to clear ArcGIS/Arcpy locks on a workspace.

    Two methods:
     1: if ANOTHER process (i.e. ArcCatalog) has the workspace open, that process is terminated
     2: if THIS process has the workspace open, it attempts to clear locks using arcpy.Exists, arcpy.Compact and arcpy.Exists in sequence

    Notes:
     1: does not work well with Python Multiprocessing
     2: this will kill ArcMap or ArcCatalog if they are accessing the workspace, so SAVE YOUR WORK

    Required imports: os, psutil
    '''

    # get process ID for this process (treated differently)
    thisPID = os.getpid()

    # normalise path
    _inputWS = os.path.normpath(inputWS)

    # get list of currently running Arc/Python processes
    p_List = []
    for p in psutil.process_iter():
        if ('Arc' in p.name) or ('python' in p.name):
            p_List.append(p.pid)

    # iterate through processes
    for pid in p_List:
        p = psutil.Process(pid)

        # if any have the workspace open
        if any(_inputWS in pth
               for pth in [fl.path for fl in p.get_open_files()]):
            print('      !!! Workspace open: %s' % _inputWS)

            # terminate if it is another process
            if pid != thisPID:
                print('      !!! Terminating process: %s' % p.name)
                p.terminate()
            else:
                print('      !!! This process has workspace open...')

    # if this process has workspace open, keep trying while it is open...
    while any(_inputWS in pth for pth in
              [fl.path for fl in psutil.Process(thisPID).get_open_files()]):
        print('    !!! Trying Exists, Compact, Exists to clear locks: %s' %
              all([
                  arcpy.Exists(_inputWS),
                  arcpy.Compact_management(_inputWS),
                  arcpy.Exists(_inputWS)
              ]))

    return True
Example #9
def clearSchemaLocks(workspace):
    if all([
            arcpy.Exists(workspace),
            arcpy.Compact_management(workspace),
            arcpy.Exists(workspace)
    ]):
        return True
    else:
        raise Exception("Workspace not clear!")
Example #10
def build_df_from_arcpy(table, columns='all'):
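    # build a pandas DataFrame from an arcpy table by reading every row with a SearchCursor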
    if columns=='all':
        columns=[f.name for f in arcpy.ListFields(table)]
    cursor = arcpy.SearchCursor(table)
    Df=pd.DataFrame(columns=columns)
    for row in cursor:
        data=pd.DataFrame([row.getValue(x) for x in columns],index=columns,dtype='str').T
        Df=Df.append(data)
    arcpy.Compact_management(Dir_mort+'/species.gdb')
    return Df
Example #11
def clearWSLocks(inputWS):
    '''Attempts to clear locks on a workspace, returns stupid message.'''
    if all([
            arcpy.Exists(inputWS),
            arcpy.Compact_management(inputWS),
            arcpy.Exists(inputWS)
    ]):
        return 'Workspace (%s) clear to continue...' % inputWS
    else:
        return '!!!!!!!! ERROR WITH WORKSPACE %s !!!!!!!!' % inputWS
Example #12
def main():
    file = sys.argv[1]
    desc = arcpy.Describe(file)
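    # Compact only applies to local (file or personal) geodatabases, so check the workspace type first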
    if desc.dataType == 'Workspace' and desc.workspaceType == 'LocalDatabase':
        print('File size BEFORE compact: {}'.format(os.path.getsize(file)))
        arcpy.Compact_management(file)
        print('File size AFTER compact: {}'.format(os.path.getsize(file)))
    else:
        print('Input data must be a personal or file geodatabase')
        print('Could not compact: {}'.format(os.path.basename(file)))
Example #13
def save_result(config_json):
    # This function saves the network analyst results as a map document in PDF

    config_data = read_config_json(config_json)
    save_dir = config_data['directories']['save_dir']
    map_save_name = config_data['filenames']['map_save_name']
    project_dir = config_data['directories']['project_dir']
    basemap_path = config_data['layer_files']['basemap']
    save_path = save_dir + r'/' + map_save_name + r'.pdf'
    gdb_dir = project_dir + r'/Data'
    gdb_name = r'Bikeshare_GDB'
    gdb_path = gdb_dir + r'/' + gdb_name + r'.gdb'

    # set arcpy workspace
    arcpy.env.workspace = gdb_path

    # reference to map document
    mxd = arcpy.mapping.MapDocument(project_dir + r'/Bikeshare_Route.mxd')

    # reset mxd geodatabase connections
    mxd.findAndReplaceWorkspacePaths('', gdb_path)

    # add a basemap
    basemap_layer = arcpy.mapping.Layer(basemap_path)
    df = arcpy.mapping.ListDataFrames(mxd, "*")[0]
    arcpy.mapping.AddLayer(df, basemap_layer, "BOTTOM")

    # create layer references
    route_layer = arcpy.mapping.ListLayers(mxd, "Routes")
    buffer_layer = arcpy.mapping.ListLayers(mxd, "Travel_Points_Buffer")

    # select features in view
    arcpy.SelectLayerByAttribute_management(buffer_layer[0], "NEW_SELECTION")
    arcpy.SelectLayerByAttribute_management(route_layer[0], "ADD_TO_SELECTION")

    # zoom to selected layers
    df.zoomToSelectedFeatures()

    # deselect features in view
    arcpy.SelectLayerByAttribute_management(buffer_layer[0], "CLEAR_SELECTION")
    arcpy.SelectLayerByAttribute_management(route_layer[0], "CLEAR_SELECTION")

    # refresh screen and export map
    arcpy.RefreshActiveView()
    arcpy.mapping.ExportToPDF(mxd, save_path)

    # clear database references
    del df
    del mxd
    del basemap_layer
    del route_layer
    del buffer_layer
    arcpy.Compact_management(gdb_path)

    print('Map Exported!')
Example #14
def clip(raster, nhd_gdb, projection, outfolder):
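    # clip the input raster to buffered HUC8 boundaries from the NHD geodatabase, writing one tif per HUC8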

    env.workspace = nhd_gdb
    env.outputCoordinateSystem = projection
    env.compression = "NONE"  # only final tifs are generated
    env.pyramid = "NONE"

    # Create a feature dataset in NHD file geodatabase named "HUC8_Albers" in Albers projection
    out_feature_dataset = "HUC8_Albers"
    arcpy.CreateFeatureDataset_management(env.workspace, out_feature_dataset,
                                          projection)
    arcpy.RefreshCatalog(nhd_gdb)

    # HUC8 polygons each saved as separate fc inheriting albers from environ
    huc8_fc = "WBD_HU8"
    field = "HUC_8"
    arcpy.MakeFeatureLayer_management(huc8_fc, "huc8_layer")

    with arcpy.da.SearchCursor(huc8_fc, field) as cursor:
        for row in cursor:
            if row[0].startswith(nhdsubregion):
                whereClause = ''' "%s" = '%s' ''' % (field, row[0])
                arcpy.SelectLayerByAttribute_management(
                    "huc8_layer", 'NEW_SELECTION', whereClause)
                arcpy.CopyFeatures_management(
                    "huc8_layer",
                    os.path.join(out_feature_dataset, "HUC" + row[0]))

    #retrieve only the single huc8 fcs and not the one with all of them
    fcs = arcpy.ListFeatureClasses("HUC%s*" % nhdsubregion, "Polygon",
                                   out_feature_dataset)
    fcs_buffered = [
        os.path.join(out_feature_dataset, fc + "_buffer") for fc in fcs
    ]
    out_clips = [
        os.path.join(outfolder, "huc8clips" + nhdsubregion,
                     "NED" + fc[3:] + ".tif") for fc in fcs
    ]

    # Buffer HUC8 feature classes by 5000m
    for fc, fc_buffered in zip(fcs, fcs_buffered):
        arcpy.Buffer_analysis(fc, fc_buffered, "5000 meters")

    cu.multi_msg("Created HUC8 buffers.")
    arcpy.RefreshCatalog(nhd_gdb)

    # Clips rasters
    cu.multi_msg("Starting HUC8 clips...")
    for fc_buffered, out_clip in zip(fcs_buffered, out_clips):
        arcpy.Clip_management(raster, '', out_clip, fc_buffered, "0",
                              "ClippingGeometry")

    arcpy.Compact_management(nhd_gdb)

    cu.multi_msg("Clipping complete.")
def update_fgdb(fgdb, data_file, table):
    arcpy.env.workspace = fgdb

    logging.info('Truncating {0}'.format(table))
    arcpy.TruncateTable_management(table)

    logging.info('Appending records')
    arcpy.Append_management(data_file, table, "TEST")

    logging.info('Compacting fgdb')
    arcpy.Compact_management(fgdb)
def clearWSLocks(inputWS):
    """Attempts to clear locks on a workspace."""
    arcpy.env.workspace = inputWS
    if all([
            arcpy.Exists(inputWS),
            arcpy.Compact_management(inputWS),
            arcpy.Exists(inputWS)
    ]):
        gprint('Workspace (%s) clear to continue...' % inputWS)
    else:
        gprint('!!!!!!!! ERROR WITH WORKSPACE %s !!!!!!!!' % inputWS)
Example #17
def move_and_archive():
    arcpy.env.workspace = working_location

    now = datetime.now()
    date = now.strftime("%Y%m%d")

    update_support_dir = os.path.join(
        TFL_FINAL_FOLDERS, input_tfl, 'documents', 'Update_Support_Documents'
    )  # Dir containing relevant update documents in Final folder
    if os.path.isdir(update_support_dir):
        shutil.rmtree(
            update_support_dir
        )  # delete the directory from the final folder, it shouldn't be archived

    #move the previous final to archive
    shutil.move(TFL_FINAL_FOLDERS + os.sep + input_tfl,
                TFL_ARCHIVE + os.sep + input_tfl + '_' + date)
    arcpy.AddMessage('Moved previous TFL folder to archive')

    #copy the working folder to final
    arcpy.Copy_management(input_folder, TFL_FINAL_FOLDERS + os.sep + input_tfl)
    arcpy.Compact_management(
        os.path.join(TFL_FINAL_FOLDERS, input_tfl, 'Data',
                     'FADM_' + input_tfl + '.gdb')
    )  # there may be leftover locks, compacting seems to get rid of them in this situation
    # shutil.copytree(input_folder, TFL_FINAL_FOLDERS + os.sep + input_tfl, ignore=shutil.ignore_patterns('*.lock'))    #alternative workaround for lock issues
    arcpy.AddMessage('Moved package to TFL Final folder')

    #recreate the Update_Support_Documents folder in Final
    os.mkdir(update_support_dir)

    # attempt to delete the working folder from Pending
    try:
        arcpy.Compact_management(input_gdb)
        shutil.rmtree(input_folder)
    except Exception as e:
        arcpy.AddWarning(
            '\n== ACTION REQUIRED == {} could not be deleted from the Pending folder. Please be sure to manually delete it'
            .format(input_tfl))
        logging.error(e)
Example #18
def Do(gdb_dir_in):
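    # compact every file geodatabase found directly inside the given folder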
    env.workspace = gdb_dir_in
    GDBs = arcpy.ListWorkspaces()
    GDBcount = len(GDBs)
    print str(GDBcount) + ' gdbs to be compacted'
    gdb_index = 1
    for gdb in GDBs:
        if gdb[-4:] in [".gdb"]:
            arcpy.Compact_management(gdb)
            print str(gdb_index) + '\t' + gdb + ' done'
        else:
            print str(gdb_index) + '\t' + gdb + ' is not a gdb file'
        gdb_index = gdb_index + 1
def list_fcs(input_gdb):

    arcpy.env.workspace = input_gdb
    gdb_work = arcpy.env.workspace
    arcpy.Compact_management(gdb_work)
    datasets = arcpy.ListDatasets(feature_type='feature')
    datasets = [''] + datasets if datasets is not None else []
    for ds in datasets:
        in_features = [
            str(os.path.join(input_gdb, fc))
            for fc in arcpy.ListFeatureClasses(feature_dataset=ds)
        ]

        return in_features
Example #20
def build_df_from_arcpy(table, columns='all',dtype=None, index_col = None):
    if columns=='all':
        columns=[f.name for f in arcpy.ListFields(table)]
    cursor = arcpy.SearchCursor(table)
    Df=pd.DataFrame(columns=columns)
    for row in cursor:
        data=pd.DataFrame([row.getValue(x) for x in columns],index=columns).T # removed dtype
        Df=Df.append(data)
#    Df=Df.astype(dtype)
    if index_col != None:
        Df.index = Df[index_col]
        Df.drop(index_col, axis = 1, inplace = True)
    arcpy.Compact_management(Dir_mort+'/species.gdb')
    return Df
def compactGDBs(aFolder):
    for dirpath, dirnames, filenames in arcpy.da.Walk(aFolder, datatype="Container"):
        for dirname in dirnames:
            if dirname.endswith(".gdb") and arcpy.Describe(
                    os.path.join(dirpath,
                                 dirname)).dataElementType == "DEWorkspace":
                try:
                    arcpy.Compact_management(os.path.join(dirpath, dirname))
                    print("Successfully compacted " + dirname)
                    logger.info("Successfully compacted " + dirname)
                except:
                    print("ERROR: " + str(os.path.join(dirpath, dirname)))
                    logger.info("ERROR: " +
                                str(os.path.join(dirpath, dirname)))
Example #22
    def list_fcs(input_gdb):
        '''Lists All Feature classes in a ESRI Geodatabase. Returns all featureclasses as a List'''

        arcpy.env.workspace = input_gdb
        gdb_work = arcpy.env.workspace
        arcpy.Compact_management(gdb_work)
        datasets = arcpy.ListDatasets(feature_type='feature')
        datasets = [''] + datasets if datasets is not None else []
        for ds in datasets:
            in_features = sorted([
                str(os.path.join(input_gdb, ds, fc))
                for fc in arcpy.ListFeatureClasses(feature_dataset=ds)
            ])
            print in_features
            return in_features
def EmptyGDB(pGDB):
    # Empty the contents of pGDB
    arcpy.AddMessage("Emptying the contents of " + pGDB)
    arcpy.env.workspace = pGDB
    datasetList = arcpy.ListDatasets("*", "")
    datasetList.extend(arcpy.ListFeatureClasses("*", ""))
    datasetList.extend(arcpy.ListTables("*", ""))
    for dataset in datasetList:
        try:
            arcpy.Delete_management(dataset)
        except:
            arcpy.AddWarning("Could not delete " + dataset + "\n- check for possible locks?\n" + str(sys.exc_info()[0]))
    arcpy.AddMessage("Compacting " + pGDB)
    try:
        arcpy.Compact_management(pGDB)
    except:
        arcpy.AddWarning("Could not compact " + pGDB)
Example #24
    def run(self):
        self.logger.logMsg("Looping through file geodatabases")
        errors = []
        for db in settings.DATABASES:
            self.archive(db)
            fgd = r'{}\{}.gdb'.format(settings.DBPATH, db)
            sde = r'{}\{}.sde'.format('.\database_connections', db)
            tup = update.updateFGDBfromSDE(fgd, sde, self.logger)
            errors = errors + tup[0]

            self.logger.logMsg('compacting geodatabase')
            arcpy.Compact_management(fgd)

        if update.wasModifiedToday('Roads',
                                   r'{}\{}.gdb'.format(settings.DBPATH,
                                                       'SGID10')):
            rebuild_locators.Runner(self.logger, self.emailer).roads()
        if update.wasModifiedToday('AddressPoints',
                                   r'{}\{}.gdb'.format(settings.DBPATH,
                                                       'SGID10')):
            rebuild_locators.Runner(self.logger, self.emailer).address_points()

        end_time = time.time()
        elapsed_time = end_time - self.start_time
        self.logger.logMsg("total minutes: " + str(elapsed_time / 60))

        if len(errors) > 0:
            errors = '\n\n'.join(errors)
            errors = re.sub('(\n\n|^)(.*): schema change detected',
                            r'\1\2: schema change detected - force update: http://172.16.17.56/arcgis/rest/services/ForceSchemaUpdate/GPServer/Force%20Schema%20Update/execute?Feature_Class_Name=\2&f=json',
                            errors)
            txt = "Updated Datasets: \n{}\n\nUpdate Errors:\n{}\n\nLog:\n{}"
            self.emailer.sendEmail(
                'mapserv-data-update: update errors',
                txt.format("\n".join(update.changes),
                           errors,
                           self.logger.log))
        else:
            txt = "Updated Datasets: \n{}\n\nLog:\n{}"
            self.emailer.sendEmail(
                "mapserv-data-update: success",
                txt.format('\n'.join(update.changes),
                           self.logger.log))

        print("done")
Example #25
    def _replace_with_tempdb(self, temp_db, project_db):
        """
        replace project_db with temp_db

        Parameters
        ----------
        temp_db : str
        project_db : str
        """
        self.tool.output.change_layers_workspace(project_db, temp_db)
        res = arcpy.Compact_management(project_db)
        del res
        arcpy.Delete_management(project_db)
        arcpy.Copy_management(temp_db, project_db)
        # repair datasource of layers that reference the project_db
        # which was temporarily deleted
        self.tool.output.change_layers_workspace(temp_db, project_db)
        return 1
Example #26
def compressGDB(workspace):
    # compact or compress the workspace
    retVal = False
    desc = arcpy.Describe(workspace)
    if desc.workspaceType == "RemoteDatabase":
        try:
            addMessageLocal("Database Compress...")
            arcpy.Compress_management(workspace)
            retVal = True
        except:
            addMessageLocal("Database Compress failed, continuing")
    elif desc.workspaceType == "LocalDatabase":
        try:
            addMessageLocal("Database Compact...")
            arcpy.Compact_management(workspace)
            retVal = True
        except:
            addMessageLocal("Local Database Compact failed, continuing")
    return retVal
def update_fgdb(fgdb, data_file, table):
    arcpy.env.workspace = fgdb
    logging.info('Truncating {0}'.format(table))
    arcpy.TruncateTable_management(table)
    logging.info('Appending records')

    arcpy.Append_management(
        data_file, "Coronavirus_Cases", "NO_TEST",
        r'State "State" true true false 8000 Text 0 0,First,#,data_file,State,0,8000;'
        r'Country "Country" true true false 8000 Text 0 0,First,#,data_file,Country,0,8000;'
        r'County_Name "County Name" true true false 50 Text 0 0,First,#,data_file,County Name,0,8000;'
        r'Full_County_Name "Full County Name" true true false 8000 Text 0 0,First,#,data_file,Full County Name,0,8000;'
        r'Cases "Cases" true true false 4 Long 0 0,First,#,data_file,Cases,-1,-1;'
        r'Update_Time "Update_Time" true true false 8 Date 0 0,First,#,data_file,Update Time,-1,-1;'
        r'UVA_URL "Source" true true false 255 Text 0 0,First,#,data_file,UVA URL,0,8000;'
        r'Harvard_URL "Harvard_URL" true true false 255 Text 0 0,First,#,data_file,Harvard URL,0,8000',
        '', '')
    logging.info('Compacting fgdb')
    arcpy.Compact_management(fgdb)
Example #28
def GenerateQALasDataset(strJobId,
                         createQARasters=False,
                         createMissingRasters=True,
                         overrideBorderPath=None):
    Utility.printArguments([
        "WMXJobID", "createQARasters", "createMissingRasters",
        "overrideBorderPath"
    ], [strJobId, createQARasters, createMissingRasters, overrideBorderPath],
                           "A04 GenerateQALasDataset")

    aa = datetime.now()
    arcpy.AddMessage("Checking out licenses")
    arcpy.CheckOutExtension("3D")
    arcpy.CheckOutExtension("Spatial")

    ProjectJob, project, strUID = getProjectFromWMXJobID(
        strJobId)  # @UnusedVariable

    las_qainfo, lasd_boundary = processJob(ProjectJob, project,
                                           createQARasters,
                                           createMissingRasters,
                                           overrideBorderPath)
    try:
        if las_qainfo is not None and os.path.exists(las_qainfo.filegdb_path):
            arcpy.Compact_management(in_workspace=las_qainfo.filegdb_path)
    except:
        pass

        # @TODO: Move this to another standalone script
        # updateCMDR(ProjectJob, project, las_qainfo, updatedBoundary)

    arcpy.AddMessage("Checking in licenses")
    arcpy.CheckInExtension("3D")
    arcpy.CheckInExtension("Spatial")

    if las_qainfo.num_las_files <= 0:
        raise Exception(
            "Project has no .las files in DELIVERED LAS_CLASSIFIED or LAS_UNCLASSIFIED folders, CANNOT CONTINUE.\nERROR: {}"
            .format(project))

    doTime(aa, "Operation Complete: A04 Generate QA LASDataset")
Example #29
    def compact_gdbs(self, project):
        """
        compact all filegeodatabases to remove locks
        """
        projektPfad = self.folders.get_projectpath(project)
        for root, dirs, files in os.walk(projektPfad):
            for folder in dirs:
                if folder.endswith(".gdb"):
                    gdb = join(root, folder)
                    arcpy.AddMessage(u'Compact {}'.format(gdb))
                    try:
                        res = arcpy.Compact_management(gdb)
                        # del res could help to avoid schema locks
                        del res
                        gc.collect()
                        arcpy.AddMessage(u'Delete {}'.format(gdb))
                        res = arcpy.Delete_management(gdb)
                        del res
                    except arcpy.ExecuteError:
                        # the .gdb folder is not a valid file geodatabase,
                        # so try shutil.rmtree instead
                        shutil.rmtree(gdb, ignore_errors=True)
Example #30
def main(*args, **kwargs):

    # create the FileGeoDatabase if it does not already exist
    fire_fgdb = FileGeoDatabase(partialPathToGeoDB, geoDBName, {
        "compact_interval_days": 7
    })

    feature_class_name = "global_fire"

    # create the main fire feature class if it does not already exist
    feature_class = createFeatureClass(fire_fgdb.fullpath, feature_class_name)

    # execute the main ETL operation
    is_successful_new_run = executeETL(feature_class)

    if is_successful_new_run:
        pkl_file = open('config.pkl', 'rb')
        myConfig = pickle.load(pkl_file) #store the data from config.pkl file
        pkl_file.close()
        # refresh all services to update the data
        fire_services = ["ReferenceNode/MODIS_Fire_1DAY", "ReferenceNode/MODIS_Fire"]
        # KS Mod 07-2014 (Adding support for restarting services via web tokens)    part 2        START
        # Restart the services
        FIRE_Service_Options = [{
            "Description":"FIRE Dataset Service",
            "admin_dir_URL":myConfig['admin_dir_URL'],
            "username":myConfig['username'],
            "password":myConfig['password'],
            "folder_name":myConfig['folder_name'],
            "service_name":myConfig['service_name'],
            "service_type":myConfig['service_type']
        }]
        global g_UpdateLog
        Do_Update_Services(FIRE_Service_Options, g_UpdateLog)
        # KS Mod 07-2014 (Adding support for restarting services via web tokens)    part 2        END
    arcpy.Compact_management(pathToGeoDatabase)