# Example #1
def flagging_operation(reference_time_string, operation_index, input_directory,
                       iteration, output_directory, window_range,
                       count_threshold, target_flag):
    """Derive local window constraint QC flags for one month of days.

    The reference time string is parsed and expanded by operation_dates(...)
    into the dates covered by operation_index; flags are computed per day and
    written as one NetCDF file per day under output_directory/<year>/.

    Args:
        reference_time_string: date string understood by dateutil's parser.
        operation_index: selects the month to process relative to the reference time.
        input_directory: input location passed to local_window_constraint_checks.
        iteration: analysis iteration number (embedded in output filenames).
        output_directory: root directory for per-year flag file output.
        window_range: forwarded unchanged to local_window_constraint_checks.
        count_threshold: forwarded unchanged to local_window_constraint_checks.
        target_flag: forwarded unchanged to local_window_constraint_checks.
    """

    # get dates in the month to be processed in this operation
    reference_time = parser.parse(reference_time_string)
    processing_dates = operation_dates(reference_time, operation_index)

    # derive flags for each day
    for processdate in processing_dates:

        # check that the per-year output directory exists and if not then make it
        year_directory = os.path.join(output_directory, str(processdate.year))
        if not os.path.exists(year_directory):
            os.makedirs(year_directory)

        # compute flags for this day
        flag_values = local_window_constraint_checks(
            input_directory, output_directory, iteration, processdate,
            window_range, count_threshold, target_flag)

        # save to NetCDF: <output>/<YYYY>/eustace_analysis_<iter>_qc_flags_<YYYYMMDD>.nc
        outputfile = os.path.join(
            output_directory, '{:04d}'.format(processdate.year),
            'eustace_analysis_{:d}_qc_flags_{:04d}{:02d}{:02d}.nc'.format(
                iteration, processdate.year, processdate.month,
                processdate.day))
        save_flag_file(flag_values, processdate, outputfile)
# Example #2
def flagging_operation(
    reference_time_string,
    operation_index,
    analysis_directory,
    iteration,
    output_directory,
    percentile_directory,
):
    """Compute location threshold QC flags for each day of the month selected
    by operation_index and write one NetCDF flag file per day."""

    # expand the reference time into the dates handled by this operation
    processing_dates = operation_dates(parser.parse(reference_time_string),
                                       operation_index)

    for processdate in processing_dates:

        # create the per-year output directory when it is missing
        year_directory = os.path.join(output_directory, str(processdate.year))
        if not os.path.exists(year_directory):
            os.makedirs(year_directory)

        # compute flags
        flag_values = location_threshold_checks(
            analysis_directory, percentile_directory, output_directory,
            iteration, processdate)

        # save
        filename = 'eustace_analysis_{:d}_qc_flags_{:04d}{:02d}{:02d}.nc'.format(
            iteration, processdate.year, processdate.month, processdate.day)
        outputfile = os.path.join(output_directory,
                                  '{:04d}'.format(processdate.year), filename)
        save_flag_file(flag_values, processdate, outputfile)
# Example #3
def flagging_operation(reference_time_string, operation_index, input_directory,
                       iteration, output_directory, start_year, end_year):
    """Compute calendar constraint QC flags for each day of the month selected
    by operation_index and save them to daily NetCDF files."""

    # dates in the month to be processed in this operation
    processing_dates = operation_dates(parser.parse(reference_time_string),
                                       operation_index)

    for processdate in processing_dates:

        # make the per-year output directory if not already present
        year_directory = os.path.join(output_directory, str(processdate.year))
        if not os.path.exists(year_directory):
            os.makedirs(year_directory)

        # compute flags
        flag_values = calendar_constraint_checks(input_directory,
                                                 output_directory, iteration,
                                                 processdate, start_year,
                                                 end_year, CALENDAR_DAY_FLAG)

        # save to NetCDF
        outputfile = os.path.join(
            output_directory,
            '{:04d}'.format(processdate.year),
            'eustace_analysis_{:d}_qc_flags_{:04d}{:02d}{:02d}.nc'.format(
                iteration, processdate.year, processdate.month,
                processdate.day))
        save_flag_file(flag_values, processdate, outputfile)
# Example #4
def flagging_operation(
    reference_time_string,
    operation_index,
    analysis_directory,
    iteration,
    output_directory,
):
    """Combine areal and extremes QC checks into a single daily flag field and
    save one NetCDF file per day of the selected month."""

    # iterate the dates in the month handled by this operation index
    for processdate in operation_dates(parser.parse(reference_time_string),
                                       operation_index):

        # ensure the per-year output directory exists
        year_path = os.path.join(output_directory, str(processdate.year))
        if not os.path.exists(year_path):
            os.makedirs(year_path)

        # compute both flag fields
        areal_flag_values = areal_checks(analysis_directory, output_directory,
                                         iteration, processdate,
                                         QUARTILE_LIMIT)
        extreme_flag_values = extremes_checks(analysis_directory,
                                              output_directory, iteration,
                                              processdate, EXTREME_LOWER_LIMIT,
                                              EXTREME_UPPER_LIMIT)

        # join flags with a bitwise OR
        flag_values = areal_flag_values | extreme_flag_values

        # save
        outputfile = os.path.join(
            output_directory, '{:04d}'.format(processdate.year),
            'eustace_analysis_{:d}_qc_flags_{:04d}{:02d}{:02d}.nc'.format(
                iteration, processdate.year, processdate.month,
                processdate.day))
        save_flag_file(flag_values, processdate, outputfile)
# Example #5
def flagging_operation(reference_time_string, operation_index,
                       analysis_directory, iteration, output_directory,
                       climatology_limit, largescale_limit,
                       constraint_threshold):
    """Merge climatology-uncertainty, large-scale-uncertainty, local-influence
    and marine flags into one daily QC field and save it per day."""

    # dates in the month to be processed in this operation
    processing_dates = operation_dates(parser.parse(reference_time_string),
                                       operation_index)

    for processdate in processing_dates:

        # create per-year output directory when missing
        year_subdirectory = os.path.join(output_directory,
                                         str(processdate.year))
        if not os.path.exists(year_subdirectory):
            os.makedirs(year_subdirectory)

        # compute the individual flag fields
        climatology_flag_values = climatology_uncertainty_checks(
            analysis_directory, output_directory, iteration, processdate,
            climatology_limit)
        large_scale_flag_values = large_scale_uncertainty_checks(
            analysis_directory, output_directory, iteration, processdate,
            largescale_limit)
        local_influence_flag_values = local_influence_checks(
            analysis_directory, output_directory, iteration, processdate,
            constraint_threshold)
        marine_flag = get_marine_flag()

        # join flags with a bitwise OR
        flag_values = climatology_flag_values | large_scale_flag_values | local_influence_flag_values | marine_flag

        # save
        outputfile = os.path.join(
            output_directory, '{:04d}'.format(processdate.year),
            'eustace_analysis_{:d}_qc_flags_{:04d}{:02d}{:02d}.nc'.format(
                iteration, processdate.year, processdate.month,
                processdate.day))
        save_flag_file(flag_values, processdate, outputfile)
# Example #6
def latent_variable_flag(input_directory, output_directory, iteration, processing_dates):
    """Flag grid cells where the climatology or large-scale latent-variable
    constraint statistic exceeds CONSTRAINT_THRESHOLD, and write one daily QC
    flag NetCDF file per date in processing_dates.

    NOTE(review): the analysis state file locations below are hard coded for
    the R1413 run; the input_directory parameter is accepted but never
    referenced in this body - confirm whether it should be used.
    """
    
    # manually setup the analysis model for the R1413 run - Warning: the eustace svn revision must be correct for the global bias model interpretation to that run analysis
    
    # climatology component state read from fixed pickle files; kept in memory
    # so it can be reused across all dates processed below
    storage_climatology = SpaceTimeComponentSolutionStorageBatched_Files( statefilename_read='/work/scratch/cmorice/advanced_standard/climatology_solution_9/climatology_solution_9.pickle',
                                                                          sample_filename_read='/work/scratch/cmorice/advanced_standard/climatology_solution_sample_9/climatology_solution_sample_9.pickle',
                                                                          prior_sample_filename_read='/work/scratch/cmorice/advanced_standard/climatology_solution_prior_sample_9/climatology_solution_prior_sample_9.pickle',
                                                                          keep_in_memory = True )
    
    # large-scale component state, also from fixed pickle files
    storage_large_scale = SpaceTimeComponentSolutionStorageBatched_Files( statefilename_read='/work/scratch/cmorice/advanced_standard/large_scale_solution_9/large_scale_solution_9.pickle',
                                                                          sample_filename_read='/work/scratch/cmorice/advanced_standard/large_scale_solution_sample_9/large_scale_solution_sample_9.pickle',
                                                                          prior_sample_filename_read='/work/scratch/cmorice/advanced_standard/large_scale_solution_prior_sample_9/large_scale_solution_prior_sample_9.pickle',
                                                                          keep_in_memory = True )
                                                                          
    # local component storage left empty - only climatology and large-scale
    # components are evaluated in the loop below
    storage_local = eustace.analysis.advanced_standard.components.storage_files_batch.SpatialComponentSolutionStorageIndexed_Files()
    covariates_descriptor = "/gws/nopw/j04/eustace/data/internal/climatology_covariates/covariates.json"
    insitu_biases = True
    breakpoints_file = "/gws/nopw/j04/eustace/data/internal/D1.7/daily/eustace_stations_global_R001127_daily_status.nc"
    global_biases = True
    global_biases_group_list = ["surfaceairmodel_ice_global" , "surfaceairmodel_land_global", "surfaceairmodel_ocean_global"]
    compute_uncertainties = False
    method = 'EXACT'
    compute_sample = False
    sample_size = definitions.GLOBAL_SAMPLE_SHAPE[3]
    compute_prior_sample = False


    print 'VERSION: {0}'.format(get_revision_id_for_module(eustace))

    # Build analysis system
    analysissystem = AnalysisSystem_EUSTACE(storage_climatology, storage_large_scale, storage_local, 
                        covariates_descriptor, insitu_biases, breakpoints_file, global_biases, global_biases_group_list,
                        compute_uncertainties, method)
    
    
    # regular lat/lon grid matching the global field shape; cell centres offset
    # by half a cell from the poles / dateline
    grid_resolution = [180. / definitions.GLOBAL_FIELD_SHAPE[1], 360. / definitions.GLOBAL_FIELD_SHAPE[2]]
    
    latitudes=numpy.linspace(-90.+grid_resolution[0]/2., 90.-grid_resolution[0]/2, num=definitions.GLOBAL_FIELD_SHAPE[1])
    longitudes=numpy.linspace(-180.+grid_resolution[1]/2., 180.-grid_resolution[1]/2, num=definitions.GLOBAL_FIELD_SHAPE[2])
    
    #timebase = TimeBaseDays(eustace.timeutils.epoch.EPOCH)
    #processdates = [timebase.number_to_datetime(daynumber) for daynumber in time_indices]
    
    # get times as understood by the analysis sustem
    time_indices =[eustace.timeutils.epoch.days_since_epoch(t) for t in processing_dates]
    
    cell_sampling   = [1, 1]
    blocking = 10

    # thinned set of sample indices for inclusion in output product
    sample_indices = range(definitions.GLOBAL_SAMPLE_SHAPE[3])
    
    # projectors are built lazily on the first date and then reused; only the
    # time index is updated per iteration (see update_time_index calls below)
    climatology_projector = None
    large_scale_projector = None
    local_projector = None

    
    for ( inner_index, time_index, processdate ) in zip( range(len(time_indices)), time_indices, processing_dates ):
        print time_index
        
        # initialise flags
        flag_values = numpy.zeros( definitions.GLOBAL_FIELD_SHAPE[1:], FLAG_TYPE )
        
        # Configure output grid
        outputstructure = OutputRectilinearGridStructure(time_index, processdate,
                                                     latitudes=latitudes,
                                                     longitudes=longitudes)
        
        # climatology component
        print 'Evaluating: climatology'
        if climatology_projector is None:
            # first iteration only: build projector and evaluate the latent
            # variable constraint once (constraint is reused for later dates)
            climatology_projector = Projector(latitudes, longitudes, grid_resolution, time_index, cell_sampling, blocking)
            climatology_projector.set_component(analysissystem.components[0])
            
            latent_climatology_constraint = evaluate_latent_variable_constraint(climatology_projector)
        
        climatology_projector.update_time_index(time_index, keep_design = False)
        climatology_projector.evaluate_design_matrix()
        
        # boolean mask of cells exceeding the constraint threshold, reshaped to the global field grid
        climatology_statistic = evaluate_constraint_statistic(climatology_projector, latent_climatology_constraint, CONSTRAINT_THRESHOLD).reshape(definitions.GLOBAL_FIELD_SHAPE[1:])

         

        # set the climatology latent-variable flag bit on masked cells
        flag_values[climatology_statistic] = flag_values[climatology_statistic] | CLIMATOLOGY_LATENT_FLAG
        
        # large scale component
        print 'Evaluating: large-scale'
        if large_scale_projector is None:
            # first iteration only: build projector and constraint, as above
            large_scale_projector = Projector(latitudes, longitudes, grid_resolution, time_index, cell_sampling, blocking)
            large_scale_projector.set_component(analysissystem.components[1])
            
            latent_large_scale_constraint = evaluate_latent_variable_constraint(large_scale_projector)
            
        large_scale_projector.update_time_index(time_index, keep_design = False)
        large_scale_projector.evaluate_design_matrix()
        
        large_scale_statistic = evaluate_constraint_statistic(large_scale_projector, latent_large_scale_constraint, CONSTRAINT_THRESHOLD).reshape(definitions.GLOBAL_FIELD_SHAPE[1:])

        # set the large-scale latent-variable flag bit on masked cells
        flag_values[large_scale_statistic] = flag_values[large_scale_statistic] | LARGE_SCALE_LATENT_FLAG
        
        # write daily flag file: <output>/<YYYY>/eustace_analysis_<iter>_qc_flags_<YYYYMMDD>.nc
        outputfile = os.path.join(output_directory, '{:04d}'.format(processdate.year), 'eustace_analysis_{:d}_qc_flags_{:04d}{:02d}{:02d}.nc'.format(iteration, processdate.year, processdate.month, processdate.day))
        save_flag_file(flag_values, processdate, outputfile)