예제 #1
0
    def generate_parameters_cum():
        """Yield (input_files, output_file) pairs for seasonal cumulation.

        Relies on outer-scope names: starting_files, start_season,
        end_season, es2_data_dir, subdir_ident_cum, prod_ident_cum,
        input_dir, in_prod_ident, prod, starting_sprod, version.
        """
        starting_files.sort()

        # Season boundaries as integers so dekads compare numerically
        season_start = int(start_season)
        season_end = int(end_season)

        for current_file in starting_files:
            # Date string in format 'YYYYDDDD' -> split into year and dekad
            current_date = functions.get_date_from_path_full(current_file)
            current_dekad = int(current_date[4:])
            current_year = int(current_date[0:4])

            in_season = False
            # Season contained within one calendar year
            if season_start < season_end:
                if season_start <= current_dekad <= season_end:
                    year_sos = current_year
                    in_season = True
            # Season straddles the year boundary: start-of-season year
            # is the current year (late dekads) or the previous one
            else:
                if current_dekad >= season_start:
                    year_sos = current_year
                    in_season = True
                if current_dekad <= season_end:
                    year_sos = current_year - 1
                    in_season = True

            if not in_season:
                continue

            # Output path encodes the current (end) date of the cumulation
            output_file = es2_data_dir + subdir_ident_cum + current_date + prod_ident_cum

            # Every dekad from start of season up to the current date
            list_dates = proc_functions.get_list_dates_for_dataset(
                prod,
                starting_sprod,
                version,
                start_date=str(year_sos) + start_season,
                end_date=current_date)

            input_files = []
            missing_file = False
            for ldate in list_dates:
                candidate = input_dir + ldate + in_prod_ident
                if os.path.isfile(candidate):
                    input_files.append(candidate)
                else:
                    # A single gap invalidates the whole season
                    logger.warning(
                        'Missing file for date {0}. Season not computed.'.
                        format(ldate))
                    missing_file = True
                    break

            if not missing_file:
                yield (input_files, output_file)
예제 #2
0
 def get_filenames_range(self):
     """Return files in self.fullpath whose date is within [from_date, to_date]."""
     selected = []
     for path in glob.glob(os.path.join(self.fullpath, "*")):
         date_str = functions.get_date_from_path_full(path)
         file_date = datetime.date(int(date_str[0:4]), int(date_str[4:6]), int(date_str[6:8]))
         # Inclusive on both ends of the requested range
         if self.from_date <= file_date <= self.to_date:
             selected.append(path)
     return selected
def compute_monthly_prod_from_10d(input_file, output_file):
    """Average three dekadal files into a monthly product.

    ES2-235: a monthly composite must not be published while the month is
    still running (it would be incomplete, e.g. a 'monthly' composite built
    mid-month), so the current month is skipped.
    """
    first_date = functions.get_date_from_path_full(input_file[0])

    # Require all three dekads and a fully elapsed month
    if len(input_file) != 3:
        return
    if functions.is_date_current_month(first_date):
        return

    output_file = functions.list_to_element(output_file)
    functions.check_output_dir(os.path.dirname(output_file))
    raster_image_math.do_avg_image(
        input_file=input_file,
        output_file=output_file,
        output_format='GTIFF',
        options="compress = lzw")
예제 #4
0
def generate_parameters_ndvi_linearx1():
    """Yield ([t-1, t0, t+1] file triplets, output_file) for linear filtering."""
    # Collect candidate input files and put them in chronological order
    files = sorted(glob.glob(starting_files))

    for current in files:
        current_date = functions.get_date_from_path_full(current)
        output_file = (es_constants.processing_dir + subdir_linearx1
                       + str(current_date) + prod_ident_linearx1)

        # Temporal neighbours of the current file (t-1 and t+1)
        neighbours = functions.files_temp_ajacent(current)

        # Only yield when both neighbours exist
        if len(neighbours) == 2:
            yield ([neighbours[0], current, neighbours[1]], output_file)
예제 #5
0
def generate_parameters_ndvi_linearx1():
    """Yield (three consecutive files, output path) for the linear-x1 filter."""
    # All candidate inputs, sorted chronologically
    candidates = glob.glob(starting_files)
    candidates.sort()

    for middle in candidates:
        middle_date = functions.get_date_from_path_full(middle)
        target = (es_constants.processing_dir + subdir_linearx1
                  + str(middle_date) + prod_ident_linearx1)

        # Files at t-1 and t+1; skip when either is missing
        before_after = functions.files_temp_ajacent(middle)
        if len(before_after) != 2:
            continue

        yield ([before_after[0], middle, before_after[1]], target)
예제 #6
0
    def std_precip_1moncum(input_file, output_file):
        """Cumulate three dekadal rainfall files into a monthly total.

        ES2-235: a temporary/incomplete composite must never be shown —
        e.g. a 'monthly' RFE built mid-month would lead to wrong analysis.
        Decision flow: current month -> skip; otherwise require exactly
        the three dekads and proceed.
        """
        first_date = functions.get_date_from_path_full(input_file[0])

        # Require all three dekads and a fully elapsed month
        if len(input_file) != 3:
            return
        if functions.is_date_current_month(first_date):
            return

        output_file = functions.list_to_element(output_file)
        functions.check_output_dir(os.path.dirname(output_file))
        raster_image_math.do_cumulate(
            input_file=input_file,
            output_file=output_file,
            output_format='GTIFF',
            options="compress=lzw")
예제 #7
0
    def rain_onset(input_files, output_file):
        """Run the rain-onset computation for the dekad of output_file."""
        output_file = functions.list_to_element(output_file)
        functions.check_output_dir(os.path.dirname(output_file))

        # Dekad index of the output date, counted from the season start
        out_date = functions.get_date_from_path_full(output_file)
        dekad_number = functions.dekad_nbr_in_season(out_date[4:],
                                                     start_season)

        # Delegate the raster computation
        raster_image_math.do_rain_onset(
            input_file=input_files,
            output_file=output_file,
            input_nodata=None,
            output_nodata=None,
            output_type='Int16',
            output_format='GTIFF',
            options="compress = lzw",
            current_dekad=dekad_number)
예제 #8
0
def syncGeoserver():
    """Copy 'relevant' datasets to GeoServer.

    Selection is driven by the product.geoserver table: for each activated
    (product, subproduct, version) entry, resolve its mapset and file list,
    make sure a GeoServer workspace exists, then upload and register every
    raster in the requested date range.
    """
    #
    #   Copy some 'relevant' datasets to GeoServer
    #   Selection of datasets is done on the basis of the product.geoserver table
    #

    # Get list of all 'relevant' subproducts (see 2. above)
    list_active_geoserver = esTools.get_activated_geoserver()

    # Loop over existing sub_products
    for geoserver_sprod in list_active_geoserver:

        # Extract local variable:
        my_prod = geoserver_sprod.productcode
        my_subprod = geoserver_sprod.subproductcode
        my_version = geoserver_sprod.version
        start_date = geoserver_sprod.startdate
        end_date = geoserver_sprod.enddate

        logger.info("Working on Product/Subproduct/Version: {0}/{1}/{2}".format(my_prod, my_subprod, my_version))

        # Manage dates from bigint to datetime; a value that is not a valid
        # YYYYMMDD becomes None (meaning "no date bound")
        if functions.is_date_yyyymmdd(str(start_date), silent=True):
            date_start = datetime.datetime.strptime(str(start_date), '%Y%m%d').date()
        else:
            date_start = None

        if functions.is_date_yyyymmdd(str(end_date), silent=True):
            date_end = datetime.datetime.strptime(str(end_date), '%Y%m%d').date()
        else:
            date_end = None

        # Get additional products info (type and category used for naming)
        product_info = querydb.get_product_out_info(productcode=my_prod,
                                                    subproductcode=my_subprod,
                                                    version=my_version)

        # my_mapset   = subprod.mapsetcode
        my_type = product_info[0].product_type
        my_category = product_info[0].category_id

        # Create a Product object (to get mapsets)
        my_product = products.Product(my_prod, version=my_version)
        my_mapsets = my_product.mapsets

        # Only the first mapset is used; warn/skip on the edge cases
        if len(my_mapsets) > 1:
            logger.info('More than 1 mapset exists. Take the first')

        if len(my_mapsets) == 0:
            logger.warning('No any mapset exists. Skip.')
            continue

        my_mapset = my_mapsets[0]

        # Create a Dataset object (to get file list)
        # If data_start is not set (e.g. for 10davg prod) create w/o dates
        if date_start:
            my_dataset = datasets.Dataset(my_prod, my_subprod, my_mapset, version=my_version, from_date=date_start,
                                          to_date=date_end)
            # MMDD products have no year component, so a date range is meaningless
            if my_dataset._frequency.dateformat == 'MMDD':
                logger.warning('Product of type MMDD: date specification not supported. Skip.')
                continue
            file_list = my_dataset.get_filenames_range()
        else:
            my_dataset = datasets.Dataset(my_prod, my_subprod, my_mapset, version=my_version)
            file_list = my_dataset.get_filenames()

        # Check that there is at least 1 file
        if len(file_list) > 0:
            # Check the Workspace exists, or create it
            my_workspace = esTools.setWorkspaceName(my_category, my_prod, my_subprod, my_version, my_mapset,
                                                    nameType=geoserverREST.geoserverWorkspaceName)

            if not geoserverREST.isWorkspace(my_workspace):
                geoserverREST.createWorkspace(my_workspace)

            # Loop over files and upload
            for my_file in file_list:
                my_date = functions.get_date_from_path_full(my_file)

                # if subprod in list_active_subprods:
                logger.debug("Working on Product/Subproduct/Version/Mapset/Date: {0}/{1}/{2}/{3}/{4}".format(
                    my_prod, my_subprod, my_version, my_mapset, my_date))

                # Upload the file and register
                # NOTE(review): local_data_dir comes from enclosing scope — verify it is set
                esTools.uploadAndRegisterRaster(my_category, my_prod, my_subprod, my_version, my_mapset, my_date,
                                                my_type, local_data_dir)
예제 #9
0
    def generate_parameters_onset():
        """Yield (input_files, output_file) pairs for the rain-onset product.

        Two processing cases, driven by the dekad of the current file:
        do_proc == 1: current dekad is the season's second dekad — pass
        (t0, t0-1); do_proc == 2: any other in-season dekad — pass
        (t0, t0-1, t0-2, previous output). Relies on outer-scope names:
        starting_files, start_season, second_dekad, end_season,
        es2_data_dir, subdir_onset, prod_ident_onset.
        """
        starting_files.sort()

        for file_t0 in starting_files:
            # Get current date
            date_t0 = functions.get_date_from_path_full(file_t0)
            # Check if we are in the seasonal range [start < current <= end]
            dekad_t0 = int(date_t0[4:])
            dekad_start = int(start_season)
            dekad_second = int(second_dekad)
            dekad_end = int(end_season)

            # Initialize processing to 0
            do_proc = 0
            in_season = False

            # Check we are within the season -> do_proc
            # (start < end: season within one year; otherwise it wraps the year)
            if dekad_start < dekad_end:
                if dekad_t0 > dekad_start and dekad_t0 <= dekad_end:
                    in_season = True
            else:
                if dekad_t0 > dekad_start or dekad_t0 <= dekad_end:
                    in_season = True
            if in_season and (dekad_t0 == dekad_second):
                do_proc = 1
            if in_season and (dekad_t0 != dekad_second):
                do_proc = 2

            if do_proc:

                output_file = es2_data_dir + subdir_onset + str(
                    date_t0) + prod_ident_onset
                # Get files at t-1 and t-2 (if they exist)
                previous_files = functions.previous_files(file_t0)

                # Check if at least one previous file has been identified
                if do_proc == 1:

                    # Check at least 1 previous file exist
                    if len(previous_files) < 1:
                        print('Error Case 1: no any previous file')
                    else:
                        # Pass two arguments (t0 and t0-1)
                        input_files = [file_t0, previous_files[0]]
                        yield (input_files, output_file)

                elif do_proc == 2:

                    error = False
                    # Check 2 previous files exist
                    if len(previous_files) < 2:
                        print('Error Case 2: a previous file is missing')
                        error = True

                    # Look for previous output
                    previous_outputs = functions.previous_files(output_file)

                    if len(previous_outputs) < 1:
                        print('Error Case 2: the previous output is missing')
                        error = True

                    # Pass four arguments (t0, t0-1, t0-2 and output-1)
                    if not error:
                        previous_output = previous_outputs[0]
                        # Only yield if the previous output actually exists on disk
                        if os.path.isfile(previous_output):
                            input_files = [
                                file_t0, previous_files[0], previous_files[1],
                                previous_output
                            ]
                            yield (input_files, output_file)
예제 #10
0
    def std_precip_1d_gRf(input_file, output_file):
        """Interpolate daily station rainfall onto the mapset grid.

        Runs gdal_grid (inverse-distance interpolation of the 'precipitat'
        field) into a temporary file, then gdalwarp to assign EPSG:4326 and
        the exact mapset extent/resolution, and finally removes tmpdir.

        Fixes vs. previous version:
        - 'print command' was Python-2 syntax (SyntaxError on Python 3).
        - gdal_grid invdist parameters 'smooting' and 'nodata:0.0' were
          malformed; per the gdal_grid docs they are 'smoothing' and
          'nodata=0.0'.
        - bare 'except:' narrowed to 'except Exception:'.
        """
        functions.check_output_dir(os.path.dirname(output_file))
        my_date = functions.get_date_from_path_full(input_file)
        layer = my_date + in_prod_ident_noext

        mapset_obj = MapSet()
        mapset_obj.assigndb(mapset)

        # Prepare tmpfile
        output_file_tmp = tmpdir + os.path.basename(output_file)

        # Extract the target grid geometry from the mapset
        size_x = mapset_obj.size_x
        size_y = mapset_obj.size_y
        geo_transform = mapset_obj.geo_transform
        pixel_shift_x = geo_transform[1]
        pixel_shift_y = geo_transform[5]
        ulx = geo_transform[0]
        uly = geo_transform[3]
        lrx = ulx + pixel_shift_x * size_x
        lry = uly + pixel_shift_y * size_y

        txe = str(ulx) + ' ' + str(lrx)
        tye = str(uly) + ' ' + str(lry)
        te = str(ulx) + ' ' + str(lry) + ' ' + str(lrx) + ' ' + str(uly)
        # NOTE(review): x pixel size is used for both axes of -tr — assumes
        # square pixels; confirm against the mapset definition
        tr = str(pixel_shift_x) + ' ' + str(pixel_shift_x)
        outsize = str(size_x) + ' ' + str(size_y)  # kept for the commented -ts option below

        # Interpolate at the original resolution (no outsize)
        command = 'gdal_grid '\
                + ' -ot Float32 -of GTiff -co "compress=LZW" ' \
                + ' -txe ' + txe\
                + ' -tye ' + tye\
                + ' -zfield precipitat '\
                + ' -l '+layer \
                + ' -a invdist:power=2.0:smoothing=1:radius1=0.0:radius2=0.0:angle=0.0:max_points=0:min_points=0:nodata=0.0 '\
                + input_file +' '+output_file_tmp
        try:
            os.system(command)
        except Exception:
            # best-effort: the warp step below will fail if nothing was produced
            pass

        # Warp to EPSG:4326 over the exact mapset extent/resolution
        command = 'gdalwarp '\
                + '-t_srs "EPSG:4326" '\
                + ' -of GTiff -co "compress=LZW" ' \
                + ' -te ' + te\
                + ' -tr ' + tr+ ' '\
                + output_file_tmp +' '+output_file
        # + ' -ts ' + outsize \

        try:
            print(command)  # was Python-2 'print command'
            os.system(command)
        except Exception:
            pass
        try:
            shutil.rmtree(tmpdir)
        except Exception:
            print('Error in removing temporary directory. Continue')
            raise NameError('Error in removing tmpdir')
예제 #11
0
    def test_get_date_from_path_full(self):
        """The date extracted from self.fullpath must equal the expected string."""
        extracted = functions.get_date_from_path_full(self.fullpath)
        self.assertEqual(extracted, self.str_date)
from lib.python.image_proc import raster_image_math
from glob import *
from lib.python import es_logging as log
from lib.python import functions
import os

logger = log.my_logger(__name__)

if __name__ == '__main__':

    # Assign processing metadata to every monthly-average SST product.
    files_dir = '/data/processing/modis-sst/v2013.1/MODIS-Africa-4km/derived/monavg/'
    files = glob(files_dir + '*.tif')

    input_file_dir = '/data/processing/modis-firms/v6.0/SPOTV-Africa-1km/derived/10dcount/'

    for myfile in sorted(files):
        print('Working on file: ' + myfile)
        try:
            date = functions.get_date_from_path_full(myfile)
            # input_file = glob(input_file_dir+date+'*.tif')
            # NOTE(review): input_file is hard-coded to a single fixed path;
            # the glob by date above is commented out — confirm this is intended
            input_file = [
                "/data/processing/modis-sst/v2013.1/MODIS-Africa-4km/derived/monavg/20181101_modis-sst_monavg_MODIS-Africa-4km_v2013.1.tif"
            ]
            if not os.path.isfile(input_file[0]):
                print('No input file found for: ' + myfile)
            else:
                raster_image_math.assign_metadata_processing(
                    input_file, myfile)
        except Exception as e:
            # Was a bare 'except:', which also swallowed KeyboardInterrupt /
            # SystemExit and hid the failure cause. Narrow it and report why.
            print('Error in processing file: ' + myfile)
            logger.error('Exception while processing {0}: {1}'.format(myfile, e))