Code Example #1
def main(*args, **kwargs):
    
    debug_log_output_directory = os.path.join(sys.path[0], "Fire_logs")
    etl_debug_logger = ETLDebugLogger(debug_log_output_directory, "Fire", {
        "debug_log_archive_days": 7
    })
    update_debug_log = etl_debug_logger.updateDebugLog # retrieve a reference to the debug logger function
    
    # create the FileGeoDatabase if it does not already exist
    fire_fgdb = FileGeoDatabase("PATH TO GEODATABASE ON DISK\\FileGeodatabases\\", "Fire.gdb", {
                                                                                              
        "compact_interval_days":7
    })
    
    feature_class_name = "global_fire"
    
    # create the main fire feature class if it does not already exist
    feature_class = createFeatureClass(fire_fgdb.fullpath, feature_class_name)
    
    # execute the main ETL operation
    is_successful_new_run = executeETL(feature_class, update_debug_log)
    
    if is_successful_new_run:
        
        fire_agsm = ArcGISServiceManager({
            'debug_logger': update_debug_log,
            'server_name': 'localhost',
            'server_port': '6080',
            'username': '******',
            'password': '******',
            'service_dir': 'ReferenceNode',
            'services': ['MODIS_Fire.MapServer', 'MODIS_Fire_1DAY.MapServer']
        })
        fire_agsm.refreshServices()
        
        # this is run because Fire.gdb receives frequent updates and deletions
        fire_fgdb.compactFileGeoDatabase() 
        
    # delete outdated debug logs
    etl_debug_logger.deleteOutdatedDebugLogs()
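
Since main() takes no required arguments, a module like this is typically launched directly on a schedule. A minimal, purely illustrative entry-point sketch (not part of the original example) could look like this:

import sys
import traceback

if __name__ == "__main__":
    try:
        main()
    except Exception:
        # surface any unhandled exception and exit non-zero so the scheduler notices
        traceback.print_exc()
        sys.exit(1)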
Code Example #2
def main(): 
    
    # select the land cover year to process; this is reflected throughout the entire module's config
    land_cover_year_to_process = "200X"
    
    # output location for the raster catalogs and each land cover raster dataset; creates the FileGeoDatabase if it does not exist
    land_cover_fgdb = FileGeoDatabase(sys.path[0], "LandCover.gdb")
    
    # get reference to raster catalog
    raster_catalog = createRasterCatalog(land_cover_year_to_process, land_cover_fgdb.fullpath)
    
    # execute the main ETL operation
    successful_new_run = executeETL(raster_catalog, land_cover_year_to_process)

    if successful_new_run:
        # if successful_new_run, then create the land cover type raster dataset
        createLandCoverDataset(land_cover_year_to_process, land_cover_fgdb.fullpath, raster_catalog.fullpath)
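
Because the year is just a string threaded through createRasterCatalog, executeETL and createLandCoverDataset, several land cover years could be processed in one run. A hypothetical sketch reusing the helpers from this example (the year values are placeholders):

def process_years(land_cover_fgdb, years=("2008", "2009", "2010")):
    # run the same catalog/ETL/dataset steps for each requested year
    for year in years:
        raster_catalog = createRasterCatalog(year, land_cover_fgdb.fullpath)
        if executeETL(raster_catalog, year):
            createLandCoverDataset(year, land_cover_fgdb.fullpath, raster_catalog.fullpath)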
Code Example #3
def main(*args, **kwargs):

    # create the FileGeoDatabase if it does not already exist
    modis_gdb = FileGeoDatabase(
        "PATH ON DISK TO FGDB \\Himalaya\\FileGeodatabases\\", "MODIS.gdb")

    # retrieve a reference to the raster catalog, create the raster catalog if it does not already exist
    raster_catalog = createRasterCatalog(modis_gdb.fullpath, "MODIS_NDVI")

    # execute the main ETL operation
    successful_new_run = executeETL(raster_catalog)

    if successful_new_run:
        # refresh all services to update the data
        modis_ndvi_services = ("Himalaya/BHUTAN_NDVI_AQUA",
                               "Himalaya/BHUTAN_NDVI_TERRA",
                               "Himalaya/NEPAL_NDVI_AQUA",
                               "Himalaya/NEPAL_NDVI_TERRA")
        modis_ndvi_service = AGServiceManager(
            modis_ndvi_services,
            "PATH ON DISK TO ARCSOM FOR RESTART\\ReferenceNode\\ETL\\ETLTools\\AGSSOM.exe",
            "localhost")
        modis_ndvi_service.refreshService()
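
A purely illustrative helper (not in the original code) that guards the refresh so a failure in the service restart is logged instead of aborting the rest of the script; it relies only on the refreshService() call used above:

import logging

def refresh_services_logged(service_manager, service_names):
    # refresh the ArcGIS services and record the outcome
    try:
        service_manager.refreshService()
        logging.info("refreshed services: %s", ", ".join(service_names))
    except Exception:
        logging.exception("failed to refresh services: %s", ", ".join(service_names))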
Code Example #4
def main(*args, **kwargs):

    # create the FileGeoDatabase if it does not already exist
    # partialPathToGeoDB and geoDBName are assumed to be defined elsewhere in this module
    fire_fgdb = FileGeoDatabase(partialPathToGeoDB, geoDBName, {
        "compact_interval_days": 7
    })

    feature_class_name = "global_fire"

    # create the main fire feature class if it does not already exist
    feature_class = createFeatureClass(fire_fgdb.fullpath, feature_class_name)

    # execute the main ETL operation
    is_successful_new_run = executeETL(feature_class)

    if is_successful_new_run:
        # load the service settings and credentials stored in the config.pkl file
        with open('config.pkl', 'rb') as pkl_file:
            myConfig = pickle.load(pkl_file)
        # refresh all services to update the data
        fire_services = ["ReferenceNode/MODIS_Fire_1DAY", "ReferenceNode/MODIS_Fire"]
        # KS Mod 07-2014 (Adding support for restarting services via web tokens)    part 2        START
        # Restart the services
        FIRE_Service_Options = [{
            "Description":"FIRE Dataset Service",
            "admin_dir_URL":myConfig['admin_dir_URL'],
            "username":myConfig['username'],
            "password":myConfig['password'],
            "folder_name":myConfig['folder_name'],
            "service_name":myConfig['service_name'],
            "service_type":myConfig['service_type']
        }]
        global g_UpdateLog
        Do_Update_Services(FIRE_Service_Options, g_UpdateLog)
        # KS Mod 07-2014 (Adding support for restarting services via web tokens)    part 2        END
    # compact the file geodatabase; pathToGeoDatabase is assumed to be defined elsewhere in this module
    arcpy.Compact_management(pathToGeoDatabase)
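
The service settings and credentials are read from config.pkl rather than hard-coded. A hypothetical sketch of how such a file could be written with the same keys (all values are placeholders):

import pickle

config = {
    'admin_dir_URL': 'https://myserver:6443/arcgis/admin',  # placeholder URL
    'username': '******',
    'password': '******',
    'folder_name': 'ReferenceNode',
    'service_name': 'MODIS_Fire',
    'service_type': 'MapServer'
}

# write the settings so the ETL script can load them at run time
with open('config.pkl', 'wb') as pkl_file:
    pickle.dump(config, pkl_file)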
Code Example #5
def main(*args, **kwargs):

    # initialize debug logger instance -------------------------------------
    debug_log_output_directory = os.path.join(sys.path[0], "TRMM_logs")
    etl_debug_logger = ETLDebugLogger(debug_log_output_directory, "TRMM",
                                      {"debug_log_archive_days": 7})
    update_debug_log = etl_debug_logger.updateDebugLog  # retrieve a reference to the debug logger function

    # color map that is applied to each 3-hour raster as well as each N-day cumulative raster (except the 30 day)
    color_map = "PATH TO RASTER COLOR MAP\\ReferenceNode\\MapServices\\trmm_3hour.clr"

    # output location for the raster catalog and each N-day cumulative raster; creates the FileGeoDatabase if it does not exist
    trmm_fgdb = FileGeoDatabase(
        "PATH TO FGDB \\ReferenceNode\\FileGeodatabases\\", "TRMM.gdb")

    # spatial projection to apply to all rasters
    spatial_projection = "GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',SPHEROID['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]];-400 -400 1000000000;-100000 10000;-100000 10000;8.98315284119521E-09;0.001;0.001;IsHighPrecision"

    # where the output Raster Catalog is saved
    output_basepath = trmm_fgdb.fullpath

    # get a reference to raster catalog, create one if it does not exist
    raster_catalog = getRasterCatalog(output_basepath, spatial_projection)

    # used to reference all start datetimes so that they are in sync with each other
    start_datetime = datetime.utcnow()
    end_datetime = start_datetime - timedelta(
        days=raster_catalog.options['archive_days'])

    # execute the main ETL operation
    is_successful_new_run = executeETL(raster_catalog, spatial_projection,
                                       start_datetime, end_datetime, color_map,
                                       update_debug_log)

    if is_successful_new_run:

        # refresh all services to update the data
        trmm_agsm = ArcGISServiceManager({
            'debug_logger': update_debug_log,
            'server_name': 'localhost',
            'server_port': '6080',
            'username': '******',
            'password': '******',
            'service_dir': 'ReferenceNode',
            'services': ['TRMM.MapServer', 'TRMM_1DAY.MapServer',
                         'TRMM_7DAY.MapServer', 'TRMM_30DAY.MapServer']
        })

        update_debug_log("stopping services...")
        trmm_agsm.stopServices()

        createTRMMComposities(raster_catalog, output_basepath, start_datetime,
                              color_map)

        update_debug_log("starting services...")
        trmm_agsm.startServices()

    # delete outdated debug logs
    etl_debug_logger.deleteOutdatedDebugLogs()
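
The '******' entries above are masked credentials. A purely illustrative alternative (the environment variable names are invented) is to read them from the environment instead of hard-coding them:

import os

def trmm_service_manager_options(update_debug_log):
    # build the ArcGISServiceManager options with credentials taken from the environment
    return {
        'debug_logger': update_debug_log,
        'server_name': os.environ.get('AGS_SERVER_NAME', 'localhost'),
        'server_port': os.environ.get('AGS_SERVER_PORT', '6080'),
        'username': os.environ['AGS_USERNAME'],
        'password': os.environ['AGS_PASSWORD'],
        'service_dir': 'ReferenceNode',
        'services': ['TRMM.MapServer', 'TRMM_1DAY.MapServer',
                     'TRMM_7DAY.MapServer', 'TRMM_30DAY.MapServer']
    }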
Code Example #6
def executeETL(raster_catalog, land_cover_year_to_process):
    
    # initialize utility objects -------------------------------------
    debug_log_output_directory = os.path.join(sys.path[0], "Land_logs")
    etl_debug_logger = ETLDebugLogger(debug_log_output_directory, "Land", {
        "debug_log_archive_days": 7
    })
    update_debug_log = etl_debug_logger.updateDebugLog # retrieve a reference to the debug logger function
    
    etl_exception_manager = ETLExceptionManager(sys.path[0], "Land_exception_reports", {
        "create_immediate_exception_reports": True
    })
    
    # initialize core ETL objects -------------------------------------
    arcpy_land_extract_validator = LandExtractValidator({
        "raster_catalog": raster_catalog,
        "ftp_file_name_field": "DistributedFileName",
        "debug_logger": update_debug_log
    })
    arcpy_land_extractor = LandExtractor({
        "target_file_extn": 'hdf',
        "ftp_options": {
            # NOTE: the source of the collected HDFs may need to be updated
            "ftp_host": 'e4ftl01.cr.usgs.gov',
            "ftp_user": '******',
            "ftp_pswrd": 'anonymous'
        },
        "debug_logger": update_debug_log
    })
    
    scratch_fgdb_fullpath = FileGeoDatabase(sys.path[0], 'scratch.gdb').fullpath
    arcpy_land_transformer = LandTransformer({
        "output_file_geodatabase": scratch_fgdb_fullpath,
        "debug_logger": update_debug_log
    })
    land_meta_data_transformer = LandMetaDataTransformer(
        {"debug_logger": update_debug_log},
        decoratee=arcpy_land_transformer
    )

    arcpy_land_loader = LandLoader({
        "raster_catalog": raster_catalog,
        "CopyRaster_management_config": {
            'config_keyword': '',
            'background_value': '',
            'nodata_value': '',
            'onebit_to_eightbit': '',
            'colormap_to_RGB': '',
            'pixel_type': ''
        },
        "debug_logger": update_debug_log
    })
    
    etl_controller = ETLController(sys.path[0], "LandCover_ETL", {
        "remove_etl_workspace_on_finish": False
    })
    
    land_etl_delegate = LandETLDelegate({
        "ftp_dirs": ['/MOTA/MCD12Q1.005/' + land_cover_year_to_process + '.01.01/'],
        "ftp_file_meta_extn": 'xml',
        "all_or_none_for_success": True,
        "debug_logger": update_debug_log,
        'exception_handler': etl_exception_manager.handleException
    })
        
    # set ETLDelegate object properties-------------------------------------
    land_etl_delegate.setExtractValidator(arcpy_land_extract_validator)
    land_etl_delegate.setExtractor(arcpy_land_extractor)
    land_etl_delegate.setTransformer(land_meta_data_transformer)
    land_etl_delegate.setLoader(arcpy_land_loader)
    land_etl_delegate.setETLController(etl_controller)

    # execute the ETL operation -------------------------------------    
    successful_new_run = land_etl_delegate.startETLProcess()
    
    # perform post-ETL operations -------------------------------------
    etl_exception_manager.finalizeExceptionXMLLog()
    etl_debug_logger.deleteOutdatedDebugLogs()
    
    return successful_new_run
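
LandMetaDataTransformer wraps arcpy_land_transformer through its decoratee argument, i.e. the decorator pattern: the metadata transformer adds its own step and delegates the rest of the work to the transformer it wraps. A hypothetical sketch of that shape (class and method names are invented, not the project's actual API):

class BaseTransformer(object):
    def transform(self, item):
        # stand-in for the wrapped transformer's real work
        return item

class MetaDataDecorator(object):
    def __init__(self, config, decoratee):
        self.config = config
        self.decoratee = decoratee

    def transform(self, item):
        item = self.decoratee.transform(item)  # delegate to the wrapped transformer
        item['meta_data_added'] = True         # then layer on the extra behavior
        return item

decorated = MetaDataDecorator({}, decoratee=BaseTransformer())
print(decorated.transform({'name': 'MCD12Q1'}))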