Example #1
	def __init__(self, mainwindow):
		QtGui.QWidget.__init__(self, mainwindow)
		# setup ui
		self.ui = Ui_Settings()
		self.ui.setupUi(self)
		# parents
		self.mainwindow = mainwindow
		# get namespace
		self.namespace = utils.read_ini()
		self.module_name = self.namespace['project_structure'][0]
		self.assignments = self.mainwindow.ui.comboBox.currentText()
		# init Tag
		tags = glob.glob(self.get_config_path(self.assignments, None))
		"""
		if self.assignments == self.namespace['process_HF']:
			tags.append("darkcal.ini")
		"""
		if len(tags) > 0:
			self.ui.comboBox.addItem("")
			tags = [tmp.split('/')[-1] for tmp in tags]
			for tag in tags:
				self.ui.comboBox.addItem(self.extract_tag(tag))
		self.ui.comboBox.setCurrentIndex(0)
		self.ui.comboBox.currentIndexChanged.connect(self.selectionchange)
		# bind save button
		self.connect(self.ui.pushButton_4, QtCore.SIGNAL("clicked()"), self.save)
		self.connect(self.ui.pushButton_5, QtCore.SIGNAL("clicked()"), self.close)
		# setup ui & parameters
		self.initui(self.assignments)
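The connect calls above use PyQt4's legacy string-based SIGNAL syntax. A minimal new-style equivalent for the two button bindings (same widget names as above) would be:

	self.ui.pushButton_4.clicked.connect(self.save)
	self.ui.pushButton_5.clicked.connect(self.close)

This is the same style the comboBox trigger a few lines earlier already uses.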
Example #2
	def __init__(self, parent=None):
		QtGui.QWidget.__init__(self, parent)
		# setup ui
		self.ui = Ui_MainWindow()
		self.ui.setupUi(self)
		# read namespace
		self.namespace = utils.read_ini()
		# other attributes
		self.dirname = None
		self.datapath = None
		self.jss = None           # None, or PBS/LSF
		self.datapathtype = True  # True: dir--runs_dir--datafile  False: dir--datafile
		self.data_format = None  # see self.namespace['data_format']
		self.num_running_jobs = 0
		# process_data holds the table information: keys are run numbers,
		# the first column stores the raw data path, and the remaining
		# columns are consistent with tableWidget
		self.columnCount = self.ui.tableWidget.columnCount()
		self.process_data = None
		self.rawdata_changelog = None
		self.JobCenter = None
		# tag_buffer is {assignments:{run_name:tag_remarks}, ...}
		self.tag_buffer = None
		# setup triggers
		self.ui.tableWidget.setEditTriggers(QtGui.QAbstractItemView.NoEditTriggers)
		self.ui.comboBox_2.currentIndexChanged.connect(self.js_changed)
		self.ui.comboBox.currentIndexChanged.connect(self.assignments_changed)
		self.ui.comboBox_3.currentIndexChanged.connect(self.decomp_changed)
		self.ui.pushButton.clicked.connect(self.view_job)
		# self.ui.pushButton_2.clicked.connect(self.view_history)
		self.ui.pushButton_3.clicked.connect(partial(process.parameters_setting, self))
		self.ui.pushButton_6.clicked.connect(self.refresh_table)
		self.ui.checkBox_3.stateChanged.connect(self.autorefresh)
		self.ui.tableWidget.customContextMenuRequested.connect(self.table_menu)
		self.ui.tableWidget.cellDoubleClicked.connect(self.cell_dclicked)
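To make the structures described in the comments concrete, here is a hypothetical sketch of the shapes of process_data and tag_buffer (the run names, paths and tags are invented placeholders):

process_data = {
    '0001': ['/path/to/raw/run_0001', 'status', '...'],  # keyed by run number;
                                                         # first column is the raw data path
}
tag_buffer = {
    'assignment_a': {'run_0001': 'tag.remarks'},  # {assignments: {run_name: tag_remarks}}
}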
Example #3
    def __init__(self, jss, project_root, data_format, main_gui):
        QtGui.QWidget.__init__(self)
        # setup ui
        self.ui = Ui_Run_Dialog()
        self.ui.setupUi(self)
        # parent gui
        self.main_gui = main_gui
        # dict, structure is "jid : ajob"
        self.jobs = {}
        # dict, {'assgn.runname.tag.remarks' : jid}
        self.run_view = {}
        # dict, store jid of waiting jobs, submitted jobs and returned jobs
        self.job_queue = {'waiting': Queue.Queue(JobCenter.JOBQMAX),
                          'submitted': Queue.Queue(JobCenter.JOBQMAX),
                          'returned': Queue.Queue(JobCenter.JOBQMAX)}
        # locations of run-scripts
        self.python_scripts = utils.get_scripts()
        # submit queue
        self.submit_queue = None
        # tag_remarks_buffer
        self.tag_remarks_buffer = None

        self.jss = jss
        self.rootdir = project_root
        self.data_format = data_format
        self.namespace = utils.read_ini()
        JobCenter.PRE = self.namespace['process_status'][0]
        JobCenter.SUB = self.namespace['process_status'][7]
        JobCenter.RUN = self.namespace['process_status'][1]
        JobCenter.ERR = self.namespace['process_status'][3]
        JobCenter.FIN = self.namespace['process_status'][2]
        JobCenter.TER = self.namespace['process_status'][4]
        # job hub file
        prev_jobs, prev_run_view = self.load_job_hub()
        self.jobs = dict(prev_jobs, **self.jobs)
        self.run_view = dict(prev_run_view, **self.run_view)
        # force overwrite
        self.force_overwrite = False
        # darkcal in h5
        self.darkcal_inh5 = utils.read_config(
            os.path.join(self.rootdir, self.namespace['project_structure'][0],
                         'config/darkcal.ini'), ['darkcal', 'inh5'])

        # triggers
        # self.ui.comboBox.currentIndexChanged.connect(self.tag_changed)
        self.connect(self.ui.pushButton, QtCore.SIGNAL("clicked()"),
                     self.darkcal_dir)
        self.connect(self.ui.pushButton_3, QtCore.SIGNAL("clicked()"),
                     self.run)
        self.connect(self.ui.pushButton_2, QtCore.SIGNAL("clicked()"),
                     self.cancel)
        self.ui.comboBox.currentIndexChanged.connect(self.config_change)
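The job_queue above holds three bounded FIFO queues (Python 2's Queue module, renamed queue in Python 3). A small self-contained sketch of the semantics, assuming a capacity of 2 in place of JobCenter.JOBQMAX:

import queue  # 'import Queue' on Python 2

q = queue.Queue(2)  # bounded queue, like Queue.Queue(JobCenter.JOBQMAX)
q.put('jid-1')
q.put('jid-2')
print(q.full())  # True; another blocking put() would wait for a get()
print(q.get())   # 'jid-1' -- items come back in FIFO order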
Example #4
    def __init__(self, parent=None):
        QtGui.QWidget.__init__(self, parent)
        # setup ui
        self.ui = Ui_StartWindow()
        self.ui.setupUi(self)
        self.ui.lineEdit.setText('')
        self.ui.lineEdit_2.setText('')
        self.ui.comboBox.setCurrentIndex(0)
        # setup workdir browser
        self.connect(self.ui.pushButton, QtCore.SIGNAL("clicked()"),
                     self.workdir)
        self.connect(self.ui.pushButton_2, QtCore.SIGNAL("clicked()"),
                     self.OK)
        self.connect(self.ui.pushButton_3, QtCore.SIGNAL("clicked()"),
                     self.datadir)
        # other attributes
        self.dirname = None
        self.datadir = None
        self.jss = None
        self.format_index = None
        self.subDir = False
        self.job_control = None
        # read namespace
        self.namespace = utils.read_ini()
        self.job_control = [
            self.namespace['process_pat_per_job'],
            self.namespace['max_jobs_per_run']
        ]
        # set jss ui
        for jss in self.namespace['JSS_support']:
            self.ui.comboBox.addItem(jss)
        # set up data format
        for fmt in self.namespace['data_format']:
            self.ui.comboBox_2.addItem(fmt)
        # set mainapp
        self.mainapp = SPIPY_MAIN()
Example #5
def main(ini_path=None,
         overwrite_flag=False,
         delay_time=0,
         gee_key_file=None,
         max_ready=-1,
         reverse_flag=False):
    """Compute monthly Tcorr images from scene images

    Parameters
    ----------
    ini_path : str
        Input file path.
    overwrite_flag : bool, optional
        If True, overwrite existing files (the default is False).
    delay_time : float, optional
        Delay time in seconds between starting export tasks (or checking the
        number of queued tasks, see "max_ready" parameter).  The default is 0.
    gee_key_file : str, None, optional
        Earth Engine service account JSON key file (the default is None).
    max_ready : int, optional
        Maximum number of queued "READY" tasks.  The default is -1, which
        implies no limit on the number of tasks that will be submitted.
    reverse_flag : bool, optional
        If True, process WRS2 tiles in reverse order.

    """
    logging.info('\nCompute monthly Tcorr images from scene images')

    ini = utils.read_ini(ini_path)

    model_name = 'SSEBOP'
    # model_name = ini['INPUTS']['et_model'].upper()

    tmax_name = ini[model_name]['tmax_source']

    export_id_fmt = 'tcorr_scene_{product}_{wrs2}_month{month:02d}_from_scene'
    asset_id_fmt = '{coll_id}/{wrs2}_month{month:02d}'

    tcorr_monthly_coll_id = '{}/{}_monthly_from_scene'.format(
        ini['EXPORT']['export_coll'], tmax_name.lower())

    wrs2_coll_id = 'projects/earthengine-legacy/assets/' \
                   'projects/usgs-ssebop/wrs2_descending_custom'
    wrs2_tile_field = 'WRS2_TILE'
    # wrs2_path_field = 'PATH'
    # wrs2_row_field = 'ROW'

    try:
        wrs2_tiles = str(ini['INPUTS']['wrs2_tiles'])
        wrs2_tiles = [x.strip() for x in wrs2_tiles.split(',')]
        wrs2_tiles = sorted([x.lower() for x in wrs2_tiles if x])
    except KeyError:
        wrs2_tiles = []
        logging.debug('  wrs2_tiles: not set in INI, defaulting to []')
    except Exception:
        raise

    try:
        study_area_extent = str(ini['INPUTS']['study_area_extent']) \
            .replace('[', '').replace(']', '').split(',')
        study_area_extent = [float(x.strip()) for x in study_area_extent]
    except KeyError:
        study_area_extent = None
        logging.debug('  study_area_extent: not set in INI')
    except Exception:
        raise

    # TODO: Add try/except blocks and default values?
    # TODO: Filter Tcorr scene collection based on collections parameter
    # collections = [x.strip() for x in ini['INPUTS']['collections'].split(',')]
    cloud_cover = float(ini['INPUTS']['cloud_cover'])
    min_pixel_count = float(ini['TCORR']['min_pixel_count'])
    min_scene_count = float(ini['TCORR']['min_scene_count'])

    if (tmax_name.upper() == 'CIMIS'
            and ini['INPUTS']['end_date'] < '2003-10-01'):
        logging.error(
            '\nCIMIS is not currently available before 2003-10-01, exiting\n')
        sys.exit()
    elif (tmax_name.upper() == 'DAYMET'
          and ini['INPUTS']['end_date'] > '2018-12-31'):
        logging.warning('\nDAYMET is not currently available past 2018-12-31, '
                        'using median Tmax values\n')
        # sys.exit()
    # elif (tmax_name.upper() == 'TOPOWX' and
    #         ini['INPUTS']['end_date'] > '2017-12-31'):
    #     logging.warning(
    #         '\nDAYMET is not currently available past 2017-12-31, '
    #         'using median Tmax values\n')
    #     # sys.exit()

    logging.info('\nInitializing Earth Engine')
    if gee_key_file:
        logging.info(
            '  Using service account key file: {}'.format(gee_key_file))
        # The "EE_ACCOUNT" parameter is not used if the key file is valid
        ee.Initialize(ee.ServiceAccountCredentials('x', key_file=gee_key_file),
                      use_cloud_api=True)
    else:
        ee.Initialize(use_cloud_api=True)

    logging.debug('\nTmax properties')
    tmax_source = tmax_name.split('_', 1)[0]
    tmax_version = tmax_name.split('_', 1)[1]
    tmax_coll_id = 'projects/earthengine-legacy/assets/' \
                   'projects/usgs-ssebop/tmax/{}'.format(tmax_name.lower())
    tmax_coll = ee.ImageCollection(tmax_coll_id)
    tmax_mask = ee.Image(tmax_coll.first()).select([0]).multiply(0)
    logging.debug('  Collection: {}'.format(tmax_coll_id))
    logging.debug('  Source: {}'.format(tmax_source))
    logging.debug('  Version: {}'.format(tmax_version))

    # Get the Tcorr scene image collection properties
    logging.debug('\nTcorr scene collection')
    tcorr_scene_coll_id = '{}/{}_scene'.format(ini['EXPORT']['export_coll'],
                                               tmax_name.lower())

    logging.debug('\nExport properties')
    export_info = utils.get_info(ee.Image(tmax_mask))
    if 'daymet' in tmax_name.lower():
        # Custom smaller extent for DAYMET focused on CONUS
        export_extent = [-1999750, -1890500, 2500250, 1109500]
        export_shape = [4500, 3000]
        export_geo = [1000, 0, -1999750, 0, -1000, 1109500]
        # Custom medium extent for DAYMET of CONUS, Mexico, and southern Canada
        # export_extent = [-2099750, -3090500, 2900250, 1909500]
        # export_shape = [5000, 5000]
        # export_geo = [1000, 0, -2099750, 0, -1000, 1909500]
        export_crs = export_info['bands'][0]['crs']
    else:
        export_crs = export_info['bands'][0]['crs']
        export_geo = export_info['bands'][0]['crs_transform']
        export_shape = export_info['bands'][0]['dimensions']
        # export_geo = ee.Image(tmax_mask).projection().getInfo()['transform']
        # export_crs = ee.Image(tmax_mask).projection().getInfo()['crs']
        # export_shape = ee.Image(tmax_mask).getInfo()['bands'][0]['dimensions']
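        # The extent follows from the affine transform and grid shape:
        # (geo[2], geo[5]) is the upper-left corner, geo[0] is the pixel
        # width and geo[4] the (negative) pixel height, so the opposite
        # corner is offset by shape * cell size in each direction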
        export_extent = [
            export_geo[2], export_geo[5] + export_shape[1] * export_geo[4],
            export_geo[2] + export_shape[0] * export_geo[0], export_geo[5]
        ]
    export_geom = ee.Geometry.Rectangle(export_extent,
                                        proj=export_crs,
                                        geodesic=False)
    logging.debug('  CRS: {}'.format(export_crs))
    logging.debug('  Extent: {}'.format(export_extent))
    logging.debug('  Geo: {}'.format(export_geo))
    logging.debug('  Shape: {}'.format(export_shape))

    if study_area_extent is None:
        if 'daymet' in tmax_name.lower():
            # CGM - For now force DAYMET to a slightly smaller "CONUS" extent
            study_area_extent = [-125, 25, -65, 49]
            # study_area_extent =  [-125, 25, -65, 52]
        elif 'cimis' in tmax_name.lower():
            study_area_extent = [-124, 35, -119, 42]
        else:
            # TODO: Make sure output from bounds is in WGS84
            study_area_extent = tmax_mask.geometry().bounds().getInfo()
        logging.debug(f'\nStudy area extent not set in INI, '
                      f'default to {study_area_extent}')
    study_area_geom = ee.Geometry.Rectangle(study_area_extent,
                                            proj='EPSG:4326',
                                            geodesic=False)

    if not ee.data.getInfo(tcorr_monthly_coll_id):
        logging.info('\nExport collection does not exist and will be built'
                     '\n  {}'.format(tcorr_monthly_coll_id))
        input('Press ENTER to continue')
        ee.data.createAsset({'type': 'IMAGE_COLLECTION'},
                            tcorr_monthly_coll_id)

    # Get current asset list
    logging.debug('\nGetting GEE asset list')
    asset_list = utils.get_ee_assets(tcorr_monthly_coll_id)
    # if logging.getLogger().getEffectiveLevel() == logging.DEBUG:
    #     pprint.pprint(asset_list[:10])

    # Get current running tasks
    tasks = utils.get_ee_tasks()
    if logging.getLogger().getEffectiveLevel() == logging.DEBUG:
        logging.debug('  Tasks: {}\n'.format(len(tasks)))
        input('ENTER')

    # Limit by year and month
    try:
        month_list = sorted(list(utils.parse_int_set(ini['TCORR']['months'])))
    except Exception:
        logging.info('\nTCORR "months" parameter not set in the INI,'
                     '\n  Defaulting to all months (1-12)\n')
        month_list = list(range(1, 13))
    try:
        year_list = sorted(list(utils.parse_int_set(ini['TCORR']['years'])))
    except Exception:
        logging.info('\nTCORR "years" parameter not set in the INI,'
                     '\n  Defaulting to all available years\n')
        year_list = []

    # Get the list of WRS2 tiles that intersect the data area and study area
    wrs2_coll = ee.FeatureCollection(wrs2_coll_id) \
        .filterBounds(export_geom) \
        .filterBounds(study_area_geom)
    if wrs2_tiles:
        wrs2_coll = wrs2_coll.filter(
            ee.Filter.inList(wrs2_tile_field, wrs2_tiles))
    wrs2_info = wrs2_coll.getInfo()['features']

    for wrs2_ftr in sorted(wrs2_info,
                           key=lambda k: k['properties']['WRS2_TILE'],
                           reverse=reverse_flag):
        wrs2_tile = wrs2_ftr['properties'][wrs2_tile_field]
        logging.info('{}'.format(wrs2_tile))

        wrs2_path = int(wrs2_tile[1:4])
        wrs2_row = int(wrs2_tile[5:8])
        # wrs2_path = wrs2_ftr['properties'][wrs2_path_field]
        # wrs2_row = wrs2_ftr['properties'][wrs2_row_field]

        for month in month_list:
            logging.info('Month: {}'.format(month))

            export_id = export_id_fmt.format(product=tmax_name.lower(),
                                             wrs2=wrs2_tile,
                                             month=month)
            logging.debug('  Export ID: {}'.format(export_id))

            asset_id = asset_id_fmt.format(coll_id=tcorr_monthly_coll_id,
                                           wrs2=wrs2_tile,
                                           month=month)
            logging.debug('  Asset ID: {}'.format(asset_id))

            if overwrite_flag:
                if export_id in tasks.keys():
                    logging.debug('  Task already submitted, cancelling')
                    ee.data.cancelTask(tasks[export_id]['id'])
                # This is intentionally not an "elif" so that a task can be
                # cancelled and an existing image/file/asset can be removed
                if asset_id in asset_list:
                    logging.debug('  Asset already exists, removing')
                    ee.data.deleteAsset(asset_id)
            else:
                if export_id in tasks.keys():
                    logging.debug('  Task already submitted, exiting')
                    continue
                elif asset_id in asset_list:
                    logging.debug('  Asset already exists, skipping')
                    continue

            tcorr_coll = ee.ImageCollection(tcorr_scene_coll_id) \
                .filterMetadata('wrs2_tile', 'equals', wrs2_tile) \
                .filterMetadata('tcorr_pixel_count', 'not_less_than', min_pixel_count) \
                .filter(ee.Filter.calendarRange(month, month, 'month')) \
                .filter(ee.Filter.inList('year', year_list))
            # TODO: Should the CLOUD_COVER_LAND filter be re-applied here?
            #     .filterMetadata('CLOUD_COVER_LAND', 'less_than', cloud_cover) \
            #     .filterDate(start_date, end_date)
            #     .filterBounds(ee.Geometry(wrs2_ftr['geometry']))

            # Use a common reducer for the image and property stats
            reducer = ee.Reducer.median() \
                .combine(ee.Reducer.count(), sharedInputs=True)
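            # Applied to a list, the combined reducer returns a dictionary
            # with 'median' and 'count' keys, computed in a single pass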

            # Compute stats from the collection images
            # This might be used when Tcorr is spatial
            # tcorr_img = tcorr_coll.reduce(reducer).rename(['tcorr', 'count'])

            # Compute stats from the image properties
            tcorr_stats = ee.List(tcorr_coll.aggregate_array('tcorr_value')) \
                .reduce(reducer)
            tcorr_stats = ee.Dictionary(tcorr_stats) \
                .combine({'median': 0, 'count': 0}, overwrite=False)
            tcorr = ee.Number(tcorr_stats.get('median'))
            count = ee.Number(tcorr_stats.get('count'))
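            # count.lt(min_scene_count) is 0 or 1, so index is 1 when there
            # are enough scenes and 9 (1 + 8) otherwise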
            index = count.lt(min_scene_count).multiply(8).add(1)
            # index = ee.Algorithms.If(count.gte(min_scene_count), 1, 9)

            # Clip the mask image to the Landsat footprint
            # Change mask values to 1 if count >= threshold
            # Mask values of 0 will be set to nodata
            mask_img = tmax_mask.add(count.gte(min_scene_count)) \
                .clip(ee.Geometry(wrs2_ftr['geometry']))
            output_img = ee.Image(
                    [mask_img.multiply(tcorr), mask_img.multiply(count)]) \
                .rename(['tcorr', 'count']) \
                .updateMask(mask_img.unmask(0))

            # # Write an empty image if the pixel count is too low
            # # CGM: Check/test if this can be combined into a single If()
            # tcorr_img = ee.Algorithms.If(
            #     count.gte(min_scene_count),
            #     tmax_mask.add(tcorr), tmax_mask.updateMask(0))
            # count_img = ee.Algorithms.If(
            #     count.gte(min_scene_count),
            #     tmax_mask.add(count), tmax_mask.updateMask(0))
            #
            # # Clip to the Landsat image footprint
            # output_img = ee.Image([tcorr_img, count_img]) \
            #     .rename(['tcorr', 'count']) \
            #     .clip(ee.Geometry(wrs2_ftr['geometry']))
            # # Clear the transparency mask
            # output_img = output_img.updateMask(output_img.unmask(0))

            output_img = output_img.set({
                'date_ingested': datetime.datetime.today().strftime('%Y-%m-%d'),
                'model_name': model_name,
                'model_version': ssebop.__version__,
                'month': int(month),
                # 'system:time_start': utils.millis(start_dt),
                'tcorr_value': tcorr,
                'tcorr_index': index,
                'tcorr_scene_count': count,
                'tmax_source': tmax_source.upper(),
                'tmax_version': tmax_version.upper(),
                'wrs2_path': wrs2_path,
                'wrs2_row': wrs2_row,
                'wrs2_tile': wrs2_tile,
                'years': ','.join(map(str, year_list)),
                # 'year_start': year_list[0],
                # 'year_end': year_list[-1],
            })
            # pprint.pprint(output_img.getInfo())
            # input('ENTER')

            logging.debug('  Building export task')
            task = ee.batch.Export.image.toAsset(
                image=output_img,
                description=export_id,
                assetId=asset_id,
                crs=export_crs,
                crsTransform='[' + ','.join(list(map(str, export_geo))) + ']',
                dimensions='{0}x{1}'.format(*export_shape),
            )

            logging.info('  Starting export task')
            utils.ee_task_start(task)

            # Pause before starting the next export task
            utils.delay_task(delay_time, max_ready)
            logging.debug('')
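Scripts like this are typically run from the command line. A hypothetical argparse wrapper (the flag names below are illustrative, not taken from the original project) could look like:

import argparse
import logging

if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description='Compute monthly Tcorr images from scene images')
    parser.add_argument('-i', '--ini', required=True, help='Input INI file path')
    parser.add_argument('--overwrite', action='store_true',
                        help='Overwrite existing files')
    parser.add_argument('--delay', type=float, default=0,
                        help='Delay between export tasks (seconds)')
    parser.add_argument('--key', default=None,
                        help='Earth Engine service account JSON key file')
    parser.add_argument('--ready', type=int, default=-1,
                        help='Maximum number of queued READY tasks')
    parser.add_argument('--reverse', action='store_true',
                        help='Process WRS2 tiles in reverse order')
    args = parser.parse_args()

    logging.basicConfig(level=logging.INFO, format='%(message)s')
    main(ini_path=args.ini, overwrite_flag=args.overwrite,
         delay_time=args.delay, gee_key_file=args.key,
         max_ready=args.ready, reverse_flag=args.reverse)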
Example #6
def main(ini_path=None, overwrite_flag=False, delay_time=0, gee_key_file=None,
         max_ready=-1):
    """Compute monthly Tcorr images

    Parameters
    ----------
    ini_path : str
        Input file path.
    overwrite_flag : bool, optional
        If True, overwrite existing files (the default is False).
    delay_time : float, optional
        Delay time in seconds between starting export tasks (or checking the
        number of queued tasks, see "max_ready" parameter).  The default is 0.
    gee_key_file : str, None, optional
        Earth Engine service account JSON key file (the default is None).
    max_ready : int, optional
        Maximum number of queued "READY" tasks.  The default is -1, which
        implies no limit on the number of tasks that will be submitted.

    """
    logging.info('\nCompute monthly Tcorr images')

    ini = utils.read_ini(ini_path)

    model_name = 'SSEBOP'
    # model_name = ini['INPUTS']['et_model'].upper()

    tmax_name = ini[model_name]['tmax_source']

    export_id_fmt = 'tcorr_image_{product}_month{month:02d}_cycle{cycle:02d}_test'
    asset_id_fmt = '{coll_id}/{month:02d}_cycle{cycle:02d}'

    tcorr_monthly_coll_id = '{}/{}_monthly_test'.format(
        ini['EXPORT']['export_coll'], tmax_name.lower())

    wrs2_coll_id = 'projects/earthengine-legacy/assets/' \
                   'projects/usgs-ssebop/wrs2_descending_custom'

    if (tmax_name.upper() == 'CIMIS' and
            ini['INPUTS']['end_date'] < '2003-10-01'):
        logging.error(
            '\nCIMIS is not currently available before 2003-10-01, exiting\n')
        sys.exit()
    elif (tmax_name.upper() == 'DAYMET' and
            ini['INPUTS']['end_date'] > '2018-12-31'):
        logging.warning(
            '\nDAYMET is not currently available past 2018-12-31, '
            'using median Tmax values\n')
        # sys.exit()
    # elif (tmax_name.upper() == 'TOPOWX' and
    #         ini['INPUTS']['end_date'] > '2017-12-31'):
    #     logging.warning(
    #         '\nDAYMET is not currently available past 2017-12-31, '
    #         'using median Tmax values\n')
    #     # sys.exit()

    # Extract the model keyword arguments from the INI
    # Set the property name to lower case and try to cast values to numbers
    model_args = {
        k.lower(): float(v) if utils.is_number(v) else v
        for k, v in dict(ini[model_name]).items()}
    # et_reference_args = {
    #     k: model_args.pop(k)
    #     for k in [k for k in model_args.keys() if k.startswith('et_reference_')]}

    logging.info('\nInitializing Earth Engine')
    if gee_key_file:
        logging.info('  Using service account key file: {}'.format(gee_key_file))
        # The "EE_ACCOUNT" parameter is not used if the key file is valid
        ee.Initialize(ee.ServiceAccountCredentials('x', key_file=gee_key_file))
    else:
        ee.Initialize()

    logging.debug('\nTmax properties')
    tmax_source = tmax_name.split('_', 1)[0]
    tmax_version = tmax_name.split('_', 1)[1]
    tmax_coll_id = 'projects/earthengine-legacy/assets/' \
                   'projects/usgs-ssebop/tmax/{}'.format(tmax_name.lower())
    tmax_coll = ee.ImageCollection(tmax_coll_id)
    tmax_mask = ee.Image(tmax_coll.first()).select([0]).multiply(0)
    logging.debug('  Collection: {}'.format(tmax_coll_id))
    logging.debug('  Source: {}'.format(tmax_source))
    logging.debug('  Version: {}'.format(tmax_version))

    # Get the Tcorr daily image collection properties
    logging.debug('\nTcorr Image properties')
    tcorr_daily_coll_id = '{}/{}_daily'.format(
        ini['EXPORT']['export_coll'], tmax_name.lower())
    tcorr_img = ee.Image(ee.ImageCollection(tcorr_daily_coll_id).first())
    tcorr_info = utils.get_info(ee.Image(tcorr_img))
    tcorr_geo = tcorr_info['bands'][0]['crs_transform']
    tcorr_crs = tcorr_info['bands'][0]['crs']
    tcorr_shape = tcorr_info['bands'][0]['dimensions']
    # tcorr_geo = ee.Image(tcorr_img).projection().getInfo()['transform']
    # tcorr_crs = ee.Image(tcorr_img).projection().getInfo()['crs']
    # tcorr_shape = ee.Image(tcorr_img).getInfo()['bands'][0]['dimensions']
    tcorr_extent = [tcorr_geo[2], tcorr_geo[5] + tcorr_shape[1] * tcorr_geo[4],
                    tcorr_geo[2] + tcorr_shape[0] * tcorr_geo[0], tcorr_geo[5]]
    logging.debug('  Shape: {}'.format(tcorr_shape))
    logging.debug('  Extent: {}'.format(tcorr_extent))
    logging.debug('  Geo: {}'.format(tcorr_geo))
    logging.debug('  CRS: {}'.format(tcorr_crs))

    if not ee.data.getInfo(tcorr_monthly_coll_id):
        logging.info('\nExport collection does not exist and will be built'
                     '\n  {}'.format(tcorr_monthly_coll_id))
        input('Press ENTER to continue')
        ee.data.createAsset({'type': 'IMAGE_COLLECTION'}, tcorr_monthly_coll_id)

    # Get current asset list
    logging.debug('\nGetting GEE asset list')
    asset_list = utils.get_ee_assets(tcorr_monthly_coll_id)
    if logging.getLogger().getEffectiveLevel() == logging.DEBUG:
        pprint.pprint(asset_list[:10])

    # Get current running tasks
    tasks = utils.get_ee_tasks()
    if logging.getLogger().getEffectiveLevel() == logging.DEBUG:
        logging.debug('  Tasks: {}\n'.format(len(tasks)))
    #     input('ENTER')

    # Limit by year and month
    try:
        month_list = sorted(list(utils.parse_int_set(ini['TCORR']['months'])))
    except Exception:
        logging.info('\nTCORR "months" parameter not set in the INI,'
                     '\n  Defaulting to all months (1-12)\n')
        month_list = list(range(1, 13))
    try:
        year_list = sorted(list(utils.parse_int_set(ini['TCORR']['years'])))
    except Exception:
        logging.info('\nTCORR "years" parameter not set in the INI,'
                     '\n  Defaulting to all available years\n')
        year_list = []

    # Key is cycle day, value is a reference date on that cycle
    # Data from: https://landsat.usgs.gov/landsat_acq
    # I only need to use 8 cycle days because the 5/7 and 7/8 cycles are offset
    cycle_dates = {
        1:  '2000-01-06',
        2:  '2000-01-07',
        3:  '2000-01-08',
        4:  '2000-01-09',
        5:  '2000-01-10',
        6:  '2000-01-11',
        7:  '2000-01-12',
        8:  '2000-01-13',
        # 9:  '2000-01-14',
        # 10: '2000-01-15',
        # 11: '2000-01-16',
        # 12: '2000-01-01',
        # 13: '2000-01-02',
        # 14: '2000-01-03',
        # 15: '2000-01-04',
        # 16: '2000-01-05',
    }

    # Key is cycle day, values are list of paths
    # First list is Landsat 8 paths, second list is Landsat 7 paths
    cycle_paths = {
        5:  [ 1, 17, 33, 49, 65,  81,  97, 106, 122, 138, 154, 170, 186, 202, 218] +
            [ 9, 25, 41, 57, 73,  89,  98, 114, 130, 146, 162, 178, 194, 210, 226],
        # 12: [ 2, 18, 34, 50, 66,  82, 107, 123, 139, 155, 171, 187, 203, 219] +
        #     [10, 26, 42, 58, 74,  99, 115, 131, 147, 163, 179, 195, 211, 227],
        3:  [ 3, 19, 35, 51, 67,  83, 108, 124, 140, 156, 172, 188, 204, 220] +
            [11, 27, 43, 59, 75, 100, 116, 132, 148, 164, 180, 196, 212, 228],
        # 10: [ 4, 20, 36, 52, 68,  84, 109, 125, 141, 157, 171, 189, 205, 221] +
        #     [12, 28, 44, 60, 76, 101, 117, 133, 149, 165, 181, 197, 213, 229],
        1:  [ 5, 21, 37, 53, 69,  85, 110, 126, 142, 158, 174, 190, 206, 222] +
            [13, 29, 45, 61, 77, 102, 118, 134, 150, 166, 182, 198, 214, 230],
        8:  [ 6, 22, 38, 54, 70,  86, 111, 127, 143, 159, 175, 191, 207, 223] +
            [14, 30, 46, 62, 78, 103, 119, 135, 151, 167, 183, 199, 215, 231],
        # 15: [ 7, 23, 39, 55, 71,  87, 112, 128, 144, 160, 176, 192, 208, 224] +
        #     [15, 31, 47, 63, 79, 104, 120, 136, 152, 168, 184, 200, 216, 232],
        6:  [ 8, 24, 40, 56, 72,  88, 113, 129, 145, 161, 177, 193, 209, 225] +
            [16, 32, 48, 64, 80, 105, 121, 137, 153, 169, 185, 201, 217, 233],
        # 13: [ 9, 25, 41, 57, 73,  89,  98, 114, 130, 146, 162, 178, 194, 210, 226] +
        #     [ 1, 17, 33, 49, 65,  81,  90, 106, 122, 138, 154, 170, 186, 202, 218],
        4:  [10, 26, 42, 58, 74,  90,  99, 115, 131, 147, 163, 179, 195, 211, 227] +
            [ 2, 18, 34, 50, 66,  82,  91, 107, 123, 139, 155, 171, 187, 203, 219],
        # 11: [11, 27, 43, 59, 75,  91, 100, 116, 132, 148, 164, 180, 196, 212, 228] +
        #     [ 3, 19, 35, 51, 67,  83,  92, 108, 124, 140, 156, 172, 188, 204, 220],
        2:  [12, 28, 44, 60, 76,  92, 101, 117, 133, 149, 165, 181, 197, 213, 229] +
            [ 4, 20, 36, 52, 68,  84,  93, 109, 125, 141, 157, 173, 189, 205, 221],
        # 9:  [13, 29, 45, 61, 77,  93, 102, 118, 134, 150, 166, 182, 198, 214, 230] +
        #     [ 5, 21, 37, 53, 69,  85,  94, 110, 126, 142, 158, 174, 190, 206, 222],
        # 16: [14, 30, 46, 62, 78,  94, 103, 119, 135, 151, 167, 183, 199, 215, 231] +
        #     [ 6, 22, 38, 54, 70,  86,  95, 111, 127, 143, 159, 175, 191, 207, 223],
        7:  [15, 31, 47, 63, 79,  95, 104, 120, 136, 152, 168, 184, 200, 216, 232] +
            [ 7, 23, 39, 55, 71,  87,  96, 112, 128, 144, 160, 176, 192, 208, 224],
        # 14: [16, 32, 48, 64, 80,  96, 105, 121, 137, 153, 169, 185, 201, 217, 233] +
        #     [ 8, 24, 40, 56, 72,  88,  97, 113, 129, 145, 161, 177, 193, 209, 225],
    }

    # Iterate over date ranges
    for month in month_list:
        logging.info('\nMonth: {}'.format(month))

        for cycle_day, ref_date in sorted(cycle_dates.items()):
            logging.info('Cycle Day: {}'.format(cycle_day))
            # # DEADBEEF
            # if cycle_day not in [2]:
            #     continue

            ref_dt = datetime.datetime.strptime(ref_date, '%Y-%m-%d')
            logging.debug('  Reference Date: {}'.format(ref_date))

            date_list = sorted(list(utils.date_range(
                datetime.datetime(year_list[0], 1, 1),
                datetime.datetime(year_list[-1], 12, 31))))
            date_list = [
                d.strftime('%Y-%m-%d') for d in date_list
                if ((abs(d - ref_dt).days % 8 == 0) and
                    (int(d.month) == month) and
                    (int(d.year) in year_list))]
            logging.debug('  Dates: {}'.format(', '.join(date_list)))

            export_id = export_id_fmt.format(
                product=tmax_name.lower(), month=month, cycle=cycle_day)
            logging.info('  Export ID: {}'.format(export_id))

            asset_id = asset_id_fmt.format(
                coll_id=tcorr_monthly_coll_id, month=month, cycle=cycle_day)
            logging.info('  Asset ID: {}'.format(asset_id))

            if overwrite_flag:
                if export_id in tasks.keys():
                    logging.debug('  Task already submitted, cancelling')
                    ee.data.cancelTask(tasks[export_id]['id'])
                # This is intentionally not an "elif" so that a task can be
                # cancelled and an existing image/file/asset can be removed
                if asset_id in asset_list:
                    logging.debug('  Asset already exists, removing')
                    ee.data.deleteAsset(asset_id)
            else:
                if export_id in tasks.keys():
                    logging.debug('  Task already submitted, exiting')
                    continue
                elif asset_id in asset_list:
                    logging.debug('  Asset already exists, skipping')
                    continue

            wrs2_coll = ee.FeatureCollection(wrs2_coll_id) \
                .filterBounds(tmax_mask.geometry()) \
                .filter(ee.Filter.inList('PATH', cycle_paths[cycle_day]))
            #     .filter(ee.Filter.inList('PATH', [44]))
            #     .filter(ee.Filter.inList('ROW', [32, 33, 34]))

            def wrs2_tcorr(ftr):
                # Build & merge the Landsat collections for the target path/row
                # Time filters are to remove bad (L5) and pre-op (L8) images
                path = ee.Number(ee.Feature(ftr).get('PATH'))
                row = ee.Number(ee.Feature(ftr).get('ROW'))

                l8_coll = ee.ImageCollection('LANDSAT/LC08/C01/T1_RT_TOA') \
                    .filterMetadata('WRS_PATH', 'equals', path) \
                    .filterMetadata('WRS_ROW', 'equals', row) \
                    .filterMetadata('CLOUD_COVER_LAND', 'less_than',
                                    float(ini['INPUTS']['cloud_cover'])) \
                    .filterMetadata('DATA_TYPE', 'equals', 'L1TP') \
                    .filter(ee.Filter.inList('DATE_ACQUIRED', date_list)) \
                    .filter(ee.Filter.gt('system:time_start',
                                         ee.Date('2013-03-24').millis()))
                l7_coll = ee.ImageCollection('LANDSAT/LE07/C01/T1_RT_TOA') \
                    .filterMetadata('WRS_PATH', 'equals', path) \
                    .filterMetadata('WRS_ROW', 'equals', row) \
                    .filterMetadata('CLOUD_COVER_LAND', 'less_than',
                                    float(ini['INPUTS']['cloud_cover'])) \
                    .filterMetadata('DATA_TYPE', 'equals', 'L1TP') \
                    .filter(ee.Filter.inList('DATE_ACQUIRED', date_list))
                l5_coll = ee.ImageCollection('LANDSAT/LT05/C01/T1_TOA') \
                    .filterMetadata('WRS_PATH', 'equals', path) \
                    .filterMetadata('WRS_ROW', 'equals', row) \
                    .filterMetadata('CLOUD_COVER_LAND', 'less_than',
                                    float(ini['INPUTS']['cloud_cover'])) \
                    .filterMetadata('DATA_TYPE', 'equals', 'L1TP')  \
                    .filter(ee.Filter.inList('DATE_ACQUIRED', date_list)) \
                    .filter(ee.Filter.lt('system:time_start',
                                         ee.Date('2011-12-31').millis()))
                l4_coll = ee.ImageCollection('LANDSAT/LT04/C01/T1_TOA') \
                    .filterMetadata('WRS_PATH', 'equals', path) \
                    .filterMetadata('WRS_ROW', 'equals', row) \
                    .filterMetadata('CLOUD_COVER_LAND', 'less_than',
                                    float(ini['INPUTS']['cloud_cover'])) \
                    .filterMetadata('DATA_TYPE', 'equals', 'L1TP') \
                    .filter(ee.Filter.inList('DATE_ACQUIRED', date_list))
                landsat_coll = ee.ImageCollection(
                    l8_coll.merge(l7_coll).merge(l5_coll))
                # landsat_coll = ee.ImageCollection(
                #     l8_coll.merge(l7_coll).merge(l5_coll).merge(l4_coll))

                def tcorr_img_func(image):
                    t_obj = ssebop.Image.from_landsat_c1_toa(
                        ee.Image(image), **model_args)
                    t_stats = ee.Dictionary(t_obj.tcorr_stats) \
                        .combine({'tcorr_value': 0, 'tcorr_count': 0},
                                 overwrite=False)
                    tcorr = ee.Number(t_stats.get('tcorr_value'))
                    count = ee.Number(t_stats.get('tcorr_count'))

                    return tmax_mask.add(ee.Image.constant(tcorr)) \
                        .rename(['tcorr']) \
                        .set({
                            'system:time_start': image.get('system:time_start'),
                            'tcorr': tcorr,
                            'count': count
                        })

                reducer = ee.Reducer.median() \
                    .combine(ee.Reducer.count(), sharedInputs=True)

                # Compute median monthly value for all images in the WRS2 tile
                wrs2_tcorr_coll = ee.ImageCollection(
                        landsat_coll.map(tcorr_img_func)) \
                    .filterMetadata('count', 'not_less_than',
                                    float(ini['TCORR']['min_pixel_count']))

                wrs2_tcorr_img = wrs2_tcorr_coll.reduce(reducer) \
                    .rename(['tcorr', 'count'])

                # Compute stats from the properties also
                wrs2_tcorr_stats = ee.Dictionary(ee.List(
                    wrs2_tcorr_coll.aggregate_array('tcorr')).reduce(reducer))
                wrs2_tcorr_stats = wrs2_tcorr_stats \
                    .combine({'median': 0, 'count': 0}, overwrite=False)

                return wrs2_tcorr_img \
                    .clip(ftr.geometry()) \
                    .set({
                        'wrs2_tile': path.format('%03d').cat(row.format('%03d')),
                        # 'wrs2_tile': ftr.get('WRS2_TILE'),
                        'tcorr': ee.Number(wrs2_tcorr_stats.get('median')),
                        'count': ee.Number(wrs2_tcorr_stats.get('count')),
                        'index': 1,
                    })

            # Combine WRS2 Tcorr monthly images to a single monthly image
            output_img = ee.ImageCollection(wrs2_coll.map(wrs2_tcorr)) \
                .filterMetadata('count', 'not_less_than',
                                float(ini['TCORR']['min_scene_count'])) \
                .mean() \
                .rename(['tcorr', 'count'])

            output_img = ee.Image([
                    tmax_mask.add(output_img.select(['tcorr'])).double(),
                    tmax_mask.add(output_img.select(['count'])).min(250).uint8()]) \
                .rename(['tcorr', 'count']) \
                .set({
                    # 'system:time_start': utils.millis(iter_start_dt),
                    'date_ingested': datetime.datetime.today().strftime('%Y-%m-%d'),
                    'cycle_day': int(cycle_day),
                    'month': int(month),
                    'years': ','.join(map(str, year_list)),
                    'model_name': model_name,
                    'model_version': ssebop.__version__,
                    'tmax_source': tmax_source.upper(),
                    'tmax_version': tmax_version.upper(),
                })

            logging.debug('  Building export task')
            task = ee.batch.Export.image.toAsset(
                image=ee.Image(output_img),
                description=export_id,
                assetId=asset_id,
                crs=tcorr_crs,
                crsTransform='[' + ','.join(list(map(str, tcorr_geo))) + ']',
                dimensions='{0}x{1}'.format(*tcorr_shape),
            )

            logging.debug('  Starting export task')
            utils.ee_task_start(task)

            # Pause before starting the next export task
            utils.delay_task(delay_time, max_ready)
            logging.debug('')
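The date filtering above keeps only dates that are a whole number of 8-day Landsat cycles away from the cycle day's reference date. A small self-contained sketch of that arithmetic (the reference dates are taken from the cycle_dates table above):

import datetime

def on_cycle(date_str, ref_date_str):
    # True when the date falls on the same 8-day cycle as the reference
    d = datetime.datetime.strptime(date_str, '%Y-%m-%d')
    ref = datetime.datetime.strptime(ref_date_str, '%Y-%m-%d')
    return abs((d - ref).days) % 8 == 0

print(on_cycle('2000-01-14', '2000-01-06'))  # True: exactly one cycle later
print(on_cycle('2000-01-10', '2000-01-06'))  # False: that date is cycle day 5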
Example #7
def main(ini_path=None):
    """Remove earlier versions of daily dT images

    Parameters
    ----------
    ini_path : str
        Input file path.

    """
    logging.info('\nRemove earlier versions of daily dT images')

    ini = utils.read_ini(ini_path)

    model_name = 'SSEBOP'
    # model_name = ini['INPUTS']['et_model'].upper()

    start_dt = datetime.datetime.strptime(ini['INPUTS']['start_date'],
                                          '%Y-%m-%d')
    end_dt = datetime.datetime.strptime(ini['INPUTS']['end_date'], '%Y-%m-%d')
    logging.debug('Start Date: {}'.format(start_dt.strftime('%Y-%m-%d')))
    logging.debug('End Date:   {}\n'.format(end_dt.strftime('%Y-%m-%d')))

    try:
        dt_source = str(ini[model_name]['dt_source'])
        logging.debug('\ndt_source:\n  {}'.format(dt_source))
    except KeyError:
        logging.error('  dt_source: must be set in INI')
        sys.exit()
    if dt_source.upper() not in ['CIMIS', 'DAYMET', 'GRIDMET']:
        raise ValueError('dt_source must be CIMIS, DAYMET, or GRIDMET')

    # Output dT daily image collection
    dt_daily_coll_id = '{}/{}_daily'.format(
        ini['EXPORT']['export_coll'], ini[model_name]['dt_source'].lower())
    logging.debug('  {}'.format(dt_daily_coll_id))

    if os.name == 'posix':
        shell_flag = False
    else:
        shell_flag = True

    logging.info('\nInitializing Earth Engine')
    ee.Initialize()
    ee.Number(1).getInfo()

    # Get list of existing images/files
    logging.debug('\nGetting GEE asset list')
    asset_list = utils.get_ee_assets(dt_daily_coll_id, shell_flag=shell_flag)
    logging.debug('Displaying first 10 images in collection')
    logging.debug(asset_list[:10])

    # Filter asset list by INI start_date and end_date
    logging.debug('\nFiltering by INI start_date and end_date')
    asset_re = re.compile(r'[\w_]+/(\d{8})_\d{8}')
    asset_list = [
        asset_id for asset_id in asset_list
        if start_dt <= datetime.datetime.strptime(
            asset_re.findall(asset_id)[0], '%Y%m%d') <= end_dt
    ]
    if not asset_list:
        logging.info('Empty asset ID list after filter by start/end date, '
                     'exiting')
        return True
    logging.debug('Displaying first 10 images in collection')
    logging.debug(asset_list[:10])

    # Group asset IDs by image date
    asset_id_dict = defaultdict(list)
    for asset_id in asset_list:
        asset_dt = datetime.datetime.strptime(
            asset_id.split('/')[-1].split('_')[0], '%Y%m%d')
        asset_id_dict[asset_dt.strftime('%Y-%m-%d')].append(asset_id)
    # pprint.pprint(asset_id_dict)

    # Remove all but the last image when sorted by export date
    logging.info('\nRemoving assets')
    for key, asset_list in asset_id_dict.items():
        # logging.debug('{}'.format(key))
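        # Within a group all IDs share the image-date prefix, so sorting
        # orders them by the trailing export date; keep only the newest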
        if len(asset_list) >= 2:
            for asset_id in sorted(asset_list)[:-1]:
                logging.info('  Delete: {}'.format(asset_id))
                try:
                    ee.data.deleteAsset(asset_id)
                except Exception as e:
                    logging.info('  Unhandled exception, skipping')
                    logging.debug(e)
                    continue
Example #8
def main(ini_path=None,
         overwrite_flag=False,
         delay_time=0,
         gee_key_file=None,
         max_ready=-1,
         cron_flag=False,
         reverse_flag=False,
         update_flag=False):
    """Compute scene Tcorr images by WRS2 tile

    Parameters
    ----------
    ini_path : str
        Input file path.
    overwrite_flag : bool, optional
        If True, overwrite existing files if the export dates are the same and
        generate new images (but with different export dates) even if the tile
        lists are the same.  The default is False.
    delay_time : float, optional
        Delay time in seconds between starting export tasks (or checking the
        number of queued tasks, see "max_ready" parameter).  The default is 0.
    gee_key_file : str, None, optional
        Earth Engine service account JSON key file (the default is None).
    max_ready : int, optional
        Maximum number of queued "READY" tasks.  The default is -1, which
        implies no limit on the number of tasks that will be submitted.
    cron_flag: bool, optional
        Not currently implemented.
    reverse_flag : bool, optional
        If True, process WRS2 tiles and dates in reverse order.
    update_flag : bool, optional
        If True, only overwrite scenes with an older model version.

    """
    logging.info('\nCompute scene Tcorr images by WRS2 tile')

    ini = utils.read_ini(ini_path)

    model_name = 'SSEBOP'
    # model_name = ini['INPUTS']['et_model'].upper()

    tmax_name = ini[model_name]['tmax_source']

    export_id_fmt = 'tcorr_scene_{product}_{scene_id}'
    asset_id_fmt = '{coll_id}/{scene_id}'

    tcorr_scene_coll_id = '{}/{}_scene'.format(ini['EXPORT']['export_coll'],
                                               tmax_name.lower())

    wrs2_coll_id = 'projects/earthengine-legacy/assets/' \
                   'projects/usgs-ssebop/wrs2_descending_custom'
    wrs2_tile_field = 'WRS2_TILE'
    wrs2_path_field = 'PATH'
    wrs2_row_field = 'ROW'

    try:
        wrs2_tiles = str(ini['INPUTS']['wrs2_tiles'])
        wrs2_tiles = sorted([x.strip() for x in wrs2_tiles.split(',')])
    except KeyError:
        wrs2_tiles = []
        logging.debug('  wrs2_tiles: not set in INI, defaulting to []')
    except Exception:
        raise

    try:
        study_area_extent = str(ini['INPUTS']['study_area_extent']) \
            .replace('[', '').replace(']', '').split(',')
        study_area_extent = [float(x.strip()) for x in study_area_extent]
    except KeyError:
        study_area_extent = None
        logging.debug('  study_area_extent: not set in INI')
    except Exception:
        raise

    # TODO: Add try/except blocks and default values?
    collections = [x.strip() for x in ini['INPUTS']['collections'].split(',')]
    cloud_cover = float(ini['INPUTS']['cloud_cover'])
    min_pixel_count = float(ini['TCORR']['min_pixel_count'])
    # min_scene_count = float(ini['TCORR']['min_scene_count'])

    if (tmax_name.upper() == 'CIMIS'
            and ini['INPUTS']['end_date'] < '2003-10-01'):
        logging.error(
            '\nCIMIS is not currently available before 2003-10-01, exiting\n')
        sys.exit()
    elif (tmax_name.upper() == 'DAYMET'
          and ini['INPUTS']['end_date'] > '2018-12-31'):
        logging.warning('\nDAYMET is not currently available past 2018-12-31, '
                        'using median Tmax values\n')
        # sys.exit()
    # elif (tmax_name.upper() == 'TOPOWX' and
    #         ini['INPUTS']['end_date'] > '2017-12-31'):
    #     logging.warning(
    #         '\nDAYMET is not currently available past 2017-12-31, '
    #         'using median Tmax values\n')
    #     # sys.exit()

    # Extract the model keyword arguments from the INI
    # Set the property name to lower case and try to cast values to numbers
    model_args = {
        k.lower(): float(v) if utils.is_number(v) else v
        for k, v in dict(ini[model_name]).items()
    }
    # et_reference_args = {
    #     k: model_args.pop(k)
    #     for k in [k for k in model_args.keys() if k.startswith('et_reference_')]}

    logging.info('\nInitializing Earth Engine')
    if gee_key_file:
        logging.info(
            '  Using service account key file: {}'.format(gee_key_file))
        # The "EE_ACCOUNT" parameter is not used if the key file is valid
        ee.Initialize(ee.ServiceAccountCredentials('x', key_file=gee_key_file),
                      use_cloud_api=True)
    else:
        ee.Initialize(use_cloud_api=True)

    # Get a Tmax image to set the Tcorr values to
    logging.debug('\nTmax properties')
    tmax_source = tmax_name.split('_', 1)[0]
    tmax_version = tmax_name.split('_', 1)[1]
    if 'MEDIAN' in tmax_name.upper():
        tmax_coll_id = 'projects/earthengine-legacy/assets/' \
                       'projects/usgs-ssebop/tmax/{}'.format(tmax_name.lower())
        tmax_coll = ee.ImageCollection(tmax_coll_id)
        tmax_mask = ee.Image(tmax_coll.first()).select([0]).multiply(0)
    else:
        # TODO: Add support for non-median tmax sources
        raise ValueError('unsupported tmax_source: {}'.format(tmax_name))
    logging.debug('  Collection: {}'.format(tmax_coll_id))
    logging.debug('  Source:  {}'.format(tmax_source))
    logging.debug('  Version: {}'.format(tmax_version))

    logging.debug('\nExport properties')
    export_info = utils.get_info(ee.Image(tmax_mask))
    if 'daymet' in tmax_name.lower():
        # Custom smaller extent for DAYMET focused on CONUS
        export_extent = [-1999750, -1890500, 2500250, 1109500]
        export_shape = [4500, 3000]
        export_geo = [1000, 0, -1999750, 0, -1000, 1109500]
        # Custom medium extent for DAYMET of CONUS, Mexico, and southern Canada
        # export_extent = [-2099750, -3090500, 2900250, 1909500]
        # export_shape = [5000, 5000]
        # export_geo = [1000, 0, -2099750, 0, -1000, 1909500]
        export_crs = export_info['bands'][0]['crs']
    else:
        export_crs = export_info['bands'][0]['crs']
        export_geo = export_info['bands'][0]['crs_transform']
        export_shape = export_info['bands'][0]['dimensions']
        # export_geo = ee.Image(tmax_mask).projection().getInfo()['transform']
        # export_crs = ee.Image(tmax_mask).projection().getInfo()['crs']
        # export_shape = ee.Image(tmax_mask).getInfo()['bands'][0]['dimensions']
        export_extent = [
            export_geo[2], export_geo[5] + export_shape[1] * export_geo[4],
            export_geo[2] + export_shape[0] * export_geo[0], export_geo[5]
        ]
    export_geom = ee.Geometry.Rectangle(export_extent,
                                        proj=export_crs,
                                        geodesic=False)
    logging.debug('  CRS: {}'.format(export_crs))
    logging.debug('  Extent: {}'.format(export_extent))
    logging.debug('  Geo: {}'.format(export_geo))
    logging.debug('  Shape: {}'.format(export_shape))

    if study_area_extent is None:
        if 'daymet' in tmax_name.lower():
            # CGM - For now force DAYMET to a slightly smaller "CONUS" extent
            study_area_extent = [-125, 25, -65, 49]
            # study_area_extent =  [-125, 25, -65, 52]
        elif 'cimis' in tmax_name.lower():
            study_area_extent = [-124, 35, -119, 42]
        else:
            # TODO: Make sure output from bounds is in WGS84
            study_area_extent = tmax_mask.geometry().bounds().getInfo()
        logging.debug(f'\nStudy area extent not set in INI, '
                      f'default to {study_area_extent}')
    study_area_geom = ee.Geometry.Rectangle(study_area_extent,
                                            proj='EPSG:4326',
                                            geodesic=False)

    # For now define the study area from an extent
    if study_area_extent:
        study_area_geom = ee.Geometry.Rectangle(study_area_extent,
                                                proj='EPSG:4326',
                                                geodesic=False)
        export_geom = export_geom.intersection(study_area_geom, 1)
        # logging.debug('  Extent: {}'.format(export_geom.bounds().getInfo()))

    # If cell_size parameter is set in the INI,
    # adjust the output cellsize and recompute the transform and shape
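    # e.g. a 4500 x 3000 grid at 1000 m resampled with cell_size = 2000
    # becomes a 2250 x 1500 grid with the same origin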
    try:
        export_cs = float(ini['EXPORT']['cell_size'])
        export_shape = [
            int(math.ceil(abs((export_shape[0] * export_geo[0]) / export_cs))),
            int(math.ceil(abs((export_shape[1] * export_geo[4]) / export_cs)))
        ]
        export_geo = [
            export_cs, 0.0, export_geo[2], 0.0, -export_cs, export_geo[5]
        ]
        logging.debug('  Custom export cell size: {}'.format(export_cs))
        logging.debug('  Geo: {}'.format(export_geo))
        logging.debug('  Shape: {}'.format(export_shape))
    except KeyError:
        pass

    if not ee.data.getInfo(tcorr_scene_coll_id):
        logging.info('\nExport collection does not exist and will be built'
                     '\n  {}'.format(tcorr_scene_coll_id))
        input('Press ENTER to continue')
        ee.data.createAsset({'type': 'IMAGE_COLLECTION'}, tcorr_scene_coll_id)

    # Get current asset list
    logging.debug('\nGetting GEE asset list')
    asset_list = utils.get_ee_assets(tcorr_scene_coll_id)
    # if logging.getLogger().getEffectiveLevel() == logging.DEBUG:
    #     pprint.pprint(asset_list[:10])

    # Get current running tasks
    tasks = utils.get_ee_tasks()
    if logging.getLogger().getEffectiveLevel() == logging.DEBUG:
        logging.debug('  Tasks: {}\n'.format(len(tasks)))
        input('ENTER')

    # TODO: Decide if month and year lists should be applied to scene exports
    # # Limit by year and month
    # try:
    #     month_list = sorted(list(utils.parse_int_set(ini['TCORR']['months'])))
    # except:
    #     logging.info('\nTCORR "months" parameter not set in the INI,'
    #                  '\n  Defaulting to all months (1-12)\n')
    #     month_list = list(range(1, 13))
    # try:
    #     year_list = sorted(list(utils.parse_int_set(ini['TCORR']['years'])))
    # except:
    #     logging.info('\nTCORR "years" parameter not set in the INI,'
    #                  '\n  Defaulting to all available years\n')
    #     year_list = []

    # if cron_flag:
    #     # CGM - This seems like a silly way of getting the date as a datetime
    #     #   Why am I doing this and not using the commented out line?
    #     end_dt = datetime.date.today().strftime('%Y-%m-%d')
    #     end_dt = datetime.datetime.strptime(end_dt, '%Y-%m-%d')
    #     end_dt = end_dt + datetime.timedelta(days=-4)
    #     # end_dt = datetime.datetime.today() + datetime.timedelta(days=-1)
    #     start_dt = end_dt + datetime.timedelta(days=-64)
    # else:
    #     start_dt = datetime.datetime.strptime(
    #         ini['INPUTS']['start_date'], '%Y-%m-%d')
    #     end_dt = datetime.datetime.strptime(
    #         ini['INPUTS']['end_date'], '%Y-%m-%d')
    start_dt = datetime.datetime.strptime(ini['INPUTS']['start_date'],
                                          '%Y-%m-%d')
    end_dt = datetime.datetime.strptime(ini['INPUTS']['end_date'], '%Y-%m-%d')
    if end_dt >= datetime.datetime.today():
        logging.debug('End Date:   {} - setting end date to current '
                      'date'.format(end_dt.strftime('%Y-%m-%d')))
        end_dt = datetime.datetime.today()
    if start_dt < datetime.datetime(1984, 3, 23):
        logging.debug('Start Date: {} - no Landsat 5+ images before '
                      '1984-03-23'.format(start_dt.strftime('%Y-%m-%d')))
        start_dt = datetime.datetime(1984, 3, 23)
    start_date = start_dt.strftime('%Y-%m-%d')
    end_date = end_dt.strftime('%Y-%m-%d')
    # next_date = (start_dt + datetime.timedelta(days=1)).strftime('%Y-%m-%d')
    logging.debug('Start Date: {}'.format(start_date))
    logging.debug('End Date:   {}\n'.format(end_date))
    if start_dt > end_dt:
        raise ValueError('start date must be before end date')

    # Get the list of WRS2 tiles that intersect the data area and study area
    wrs2_coll = ee.FeatureCollection(wrs2_coll_id) \
        .filterBounds(export_geom) \
        .filterBounds(study_area_geom)
    if wrs2_tiles:
        wrs2_coll = wrs2_coll.filter(
            ee.Filter.inList(wrs2_tile_field, wrs2_tiles))
    wrs2_info = wrs2_coll.getInfo()['features']
    # pprint.pprint(wrs2_info)
    # input('ENTER')

    # Iterate over WRS2 tiles (default is from west to east)
    for wrs2_ftr in sorted(wrs2_info,
                           key=lambda k: k['properties']['WRS2_TILE'],
                           reverse=not reverse_flag):
        wrs2_tile = wrs2_ftr['properties'][wrs2_tile_field]
        logging.info('{}'.format(wrs2_tile))

        wrs2_path = int(wrs2_tile[1:4])
        wrs2_row = int(wrs2_tile[5:8])
        # wrs2_path = wrs2_ftr['properties']['PATH']
        # wrs2_row = wrs2_ftr['properties']['ROW']

        wrs2_filter = [{
            'type': 'equals',
            'leftField': 'WRS_PATH',
            'rightValue': wrs2_path
        }, {
            'type': 'equals',
            'leftField': 'WRS_ROW',
            'rightValue': wrs2_row
        }]
        filter_args = {c: wrs2_filter for c in collections}

        # Build and merge the Landsat collections
        model_obj = ssebop.Collection(
            collections=collections,
            start_date=start_date,
            end_date=end_date,
            cloud_cover_max=cloud_cover,
            geometry=ee.Geometry(wrs2_ftr['geometry']),
            model_args=model_args,
            filter_args=filter_args,
        )
        landsat_coll = model_obj.overpass(variables=['ndvi'])
        # pprint.pprint(landsat_coll.aggregate_array('system:id').getInfo())
        # input('ENTER')

        try:
            image_id_list = landsat_coll.aggregate_array('system:id').getInfo()
        except Exception as e:
            logging.warning('  Error getting image ID list, skipping tile')
            logging.debug(f'  {e}')
            continue

        if update_flag:
            assets_info = utils.get_info(
                ee.ImageCollection(tcorr_scene_coll_id).filterMetadata(
                    'wrs2_tile', 'equals',
                    wrs2_tile).filterDate(start_date, end_date))
            asset_props = {
                f'{tcorr_scene_coll_id}/{x["properties"]["system:index"]}':
                x['properties']
                for x in assets_info['features']
            }
        else:
            asset_props = {}

        # Sort by date
        for image_id in sorted(image_id_list,
                               key=lambda k: k.split('/')[-1].split('_')[-1],
                               reverse=reverse_flag):
            scene_id = image_id.split('/')[-1]
            logging.info(f'{scene_id}')

            export_dt = datetime.datetime.strptime(
                scene_id.split('_')[-1], '%Y%m%d')
            export_date = export_dt.strftime('%Y-%m-%d')
            # next_date = (export_dt + datetime.timedelta(days=1)).strftime('%Y-%m-%d')

            # # Uncomment to apply month and year list filtering
            # if month_list and export_dt.month not in month_list:
            #     logging.debug(f'  Date: {export_date} - month not in INI - skipping')
            #     continue
            # elif year_list and export_dt.year not in year_list:
            #     logging.debug(f'  Date: {export_date} - year not in INI - skipping')
            #     continue

            logging.debug(f'  Date: {export_date}')

            export_id = export_id_fmt.format(product=tmax_name.lower(),
                                             scene_id=scene_id)
            logging.debug(f'  Export ID: {export_id}')

            asset_id = asset_id_fmt.format(coll_id=tcorr_scene_coll_id,
                                           scene_id=scene_id)
            logging.debug(f'  Asset ID: {asset_id}')

            if update_flag:

                def version_number(version_str):
                    return list(map(int, version_str.split('.')))
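                # Example: version_number('0.13.2') -> [0, 13, 2]; Python
                #   compares lists elementwise, so [0, 13, 2] < [0, 14, 0]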

                if export_id in tasks.keys():
                    logging.info('  Task already submitted, skipping')
                    continue
                # In update mode only overwrite if the version is old
                if asset_props and asset_id in asset_props.keys():
                    model_ver = version_number(ssebop.__version__)
                    asset_ver = version_number(
                        asset_props[asset_id]['model_version'])

                    if asset_ver < model_ver:
                        logging.info('  Asset model version is old, removing')
                        try:
                            ee.data.deleteAsset(asset_id)
                        except Exception:
                            logging.info('  Error removing asset, skipping')
                            continue
                    else:
                        logging.info('  Asset is up to date, skipping')
                        continue
            elif overwrite_flag:
                if export_id in tasks.keys():
                    logging.debug('  Task already submitted, cancelling')
                    ee.data.cancelTask(tasks[export_id]['id'])
                # This is intentionally not an "elif" so that a task can be
                # cancelled and an existing image/file/asset can be removed
                if asset_id in asset_list:
                    logging.debug('  Asset already exists, removing')
                    ee.data.deleteAsset(asset_id)
            else:
                if export_id in tasks.keys():
                    logging.debug('  Task already submitted, exiting')
                    continue
                elif asset_id in asset_list:
                    logging.debug('  Asset already exists, skipping')
                    continue

            image = ee.Image(image_id)
            # TODO: Will need to be changed for SR or use from_image_id()
            t_obj = ssebop.Image.from_landsat_c1_toa(image_id, **model_args)
            t_stats = ee.Dictionary(t_obj.tcorr_stats) \
                .combine({'tcorr_p5': 0, 'tcorr_count': 0}, overwrite=False)
            tcorr = ee.Number(t_stats.get('tcorr_p5'))
            count = ee.Number(t_stats.get('tcorr_count'))
            index = ee.Algorithms.If(count.gte(min_pixel_count), 0, 9)

            # Write an empty image if the pixel count is too low
            # (gte matches the tcorr_index test above)
            tcorr_img = ee.Algorithms.If(count.gte(min_pixel_count),
                                         tmax_mask.add(tcorr),
                                         tmax_mask.updateMask(0))

            # Clip to the Landsat image footprint
            output_img = ee.Image(tcorr_img).clip(image.geometry())

            # Clear the transparency mask
            output_img = output_img.updateMask(output_img.unmask(0)) \
                .rename(['tcorr']) \
                .set({
                    'CLOUD_COVER': image.get('CLOUD_COVER'),
                    'CLOUD_COVER_LAND': image.get('CLOUD_COVER_LAND'),
                    # 'SPACECRAFT_ID': image.get('SPACECRAFT_ID'),
                    'coll_id': image_id.split('/')[0],
                    # 'cycle_day': ((export_dt - cycle_base_dt).days % 8) + 1,
                    'date_ingested': datetime.datetime.today().strftime('%Y-%m-%d'),
                    'date': export_dt.strftime('%Y-%m-%d'),
                    'doy': int(export_dt.strftime('%j')),
                    'model_name': model_name,
                    'model_version': ssebop.__version__,
                    'month': int(export_dt.month),
                    'scene_id': image_id.split('/')[-1],
                    'system:time_start': image.get('system:time_start'),
                    'tcorr_value': tcorr,
                    'tcorr_index': index,
                    'tcorr_pixel_count': count,
                    'tmax_source': tmax_source.upper(),
                    'tmax_version': tmax_version.upper(),
                    'wrs2_path': wrs2_path,
                    'wrs2_row': wrs2_row,
                    'wrs2_tile': wrs2_tile,
                    'year': int(export_dt.year),
                })
            # pprint.pprint(output_img.getInfo()['properties'])
            # input('ENTER')

            logging.debug('  Building export task')
            task = ee.batch.Export.image.toAsset(
                image=output_img,
                description=export_id,
                assetId=asset_id,
                crs=export_crs,
                crsTransform='[' + ','.join(list(map(str, export_geo))) + ']',
                dimensions='{0}x{1}'.format(*export_shape),
            )

            logging.info('  Starting export task')
            utils.ee_task_start(task)

        # Pause before starting the next date (not export task)
        utils.delay_task(delay_time, max_ready)
        logging.debug('')
Exemple #9
0
def main(ini_path=None,
         overwrite_flag=False,
         delay_time=0,
         gee_key_file=None,
         max_ready=-1):
    """Compute default Tcorr image asset

    Parameters
    ----------
    ini_path : str
        Input file path.
    overwrite_flag : bool, optional
        If True, overwrite existing files (the default is False).
    delay_time : float, optional
        Delay time in seconds between starting export tasks (or checking the
        number of queued tasks, see "max_ready" parameter).  The default is 0.
    gee_key_file : str, None, optional
        Earth Engine service account JSON key file (the default is None).
    max_ready: int, optional
        Maximum number of queued "READY" tasks.  The default is -1, which
        implies no limit to the number of tasks that will be submitted.

    """
    logging.info('\nCompute default Tcorr image asset')

    ini = utils.read_ini(ini_path)

    model_name = 'SSEBOP'
    # model_name = ini['INPUTS']['et_model'].upper()

    tmax_name = ini[model_name]['tmax_source']

    export_id_fmt = 'tcorr_image_{product}_default'

    tcorr_daily_coll_id = '{}/{}_daily'.format(ini['EXPORT']['export_coll'],
                                               tmax_name.lower())
    tcorr_default_img_id = '{}/{}_default'.format(ini['EXPORT']['export_coll'],
                                                  tmax_name.lower())

    try:
        tcorr_default = ini[model_name]['tcorr_default']
    except KeyError:
        tcorr_default = 0.978

    if (tmax_name.upper() == 'CIMIS'
            and ini['INPUTS']['end_date'] < '2003-10-01'):
        logging.error(
            '\nCIMIS is not currently available before 2003-10-01, exiting\n')
        sys.exit()
    elif (tmax_name.upper() == 'DAYMET'
          and ini['INPUTS']['end_date'] > '2018-12-31'):
        logging.warning('\nDAYMET is not currently available past 2018-12-31, '
                        'using median Tmax values\n')
        # sys.exit()
    # elif (tmax_name.upper() == 'TOPOWX' and
    #         ini['INPUTS']['end_date'] > '2017-12-31'):
    #     logging.warning(
    #         '\nDAYMET is not currently available past 2017-12-31, '
    #         'using median Tmax values\n')
    #     # sys.exit()

    logging.info('\nInitializing Earth Engine')
    if gee_key_file:
        logging.info(
            '  Using service account key file: {}'.format(gee_key_file))
        # The "EE_ACCOUNT" parameter is not used if the key file is valid
        ee.Initialize(ee.ServiceAccountCredentials('x', key_file=gee_key_file))
    else:
        ee.Initialize()

    logging.debug('\nTmax properties')
    tmax_source = tmax_name.split('_', 1)[0]
    tmax_version = tmax_name.split('_', 1)[1]
    # tmax_coll_id = 'projects/earthengine-legacy/assets/' \
    #                'projects/usgs-ssebop/tmax/{}'.format(tmax_name.lower())
    # tmax_coll = ee.ImageCollection(tmax_coll_id)
    # tmax_mask = ee.Image(tmax_coll.first()).select([0]).multiply(0)
    # logging.debug('  Collection: {}'.format(tmax_coll_id))
    logging.debug('  Source: {}'.format(tmax_source))
    logging.debug('  Version: {}'.format(tmax_version))

    # Get the Tcorr daily image collection properties
    logging.debug('\nTcorr Image properties')
    tcorr_img = ee.Image(ee.ImageCollection(tcorr_daily_coll_id).first())
    tcorr_info = utils.get_info(ee.Image(tcorr_img))
    tcorr_geo = tcorr_info['bands'][0]['crs_transform']
    tcorr_crs = tcorr_info['bands'][0]['crs']
    tcorr_shape = tcorr_info['bands'][0]['dimensions']
    # tcorr_geo = ee.Image(tcorr_img).projection().getInfo()['transform']
    # tcorr_crs = ee.Image(tcorr_img).projection().getInfo()['crs']
    # tcorr_shape = ee.Image(tcorr_img).getInfo()['bands'][0]['dimensions']
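    # crs_transform is ordered [xScale, xShear, xTranslation, yShear, yScale,
    #   yTranslation]; yScale is negative, so the extent below works out to
    #   [xmin, ymin, xmax, ymax]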
    tcorr_extent = [
        tcorr_geo[2], tcorr_geo[5] + tcorr_shape[1] * tcorr_geo[4],
        tcorr_geo[2] + tcorr_shape[0] * tcorr_geo[0], tcorr_geo[5]
    ]
    logging.debug('  Shape: {}'.format(tcorr_shape))
    logging.debug('  Extent: {}'.format(tcorr_extent))
    logging.debug('  Geo: {}'.format(tcorr_geo))
    logging.debug('  CRS: {}'.format(tcorr_crs))

    # Get current running tasks
    tasks = utils.get_ee_tasks()
    if logging.getLogger().getEffectiveLevel() == logging.DEBUG:
        logging.debug('  Tasks: {}\n'.format(len(tasks)))
        input('ENTER')

    # # Limit by year
    # try:
    #     year_list = sorted(list(utils.parse_int_set(ini['TCORR']['years'])))
    # except:
    #     logging.info('\nTCORR "years" parameter not set in the INI,'
    #                  '\n  Defaulting to all available years\n')
    #     year_list = []

    export_id = export_id_fmt.format(product=tmax_name.lower())
    logging.info('  Export ID: {}'.format(export_id))
    logging.info('  Asset ID: {}'.format(tcorr_default_img_id))

    if overwrite_flag:
        if export_id in tasks.keys():
            logging.debug('  Task already submitted, cancelling')
            ee.data.cancelTask(tasks[export_id]['id'])
        # This is intentionally not an "elif" so that a task can be
        # cancelled and an existing image/file/asset can be removed
        if ee.data.getInfo(tcorr_default_img_id):
            logging.debug('  Asset already exists, removing')
            ee.data.deleteAsset(tcorr_default_img_id)
    else:
        if export_id in tasks.keys():
            logging.debug('  Task already submitted, exiting')
            return False
        elif ee.data.getInfo(tcorr_default_img_id):
            logging.debug('  Asset already exists, exiting')
            return False

    tcorr_daily_coll = ee.ImageCollection(tcorr_daily_coll_id)
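    # mosaic().multiply(0) below builds a zero image on the daily Tcorr
    #   footprint, .add(tcorr_default) sets the constant value, and
    #   .updateMask(1) makes the result fully unmasked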

    output_img = tcorr_daily_coll.mosaic().multiply(0).add(tcorr_default)\
        .updateMask(1).rename(['tcorr'])\
        .set({
            # 'system:time_start': utils.millis(iter_start_dt),
            'date_ingested': datetime.datetime.today().strftime('%Y-%m-%d'),
            'model_name': model_name,
            'model_version': ssebop.__version__,
            'tmax_source': tmax_source.upper(),
            'tmax_version': tmax_version.upper(),
        })

    logging.debug('  Building export task')
    task = ee.batch.Export.image.toAsset(
        image=ee.Image(output_img),
        description=export_id,
        assetId=tcorr_default_img_id,
        crs=tcorr_crs,
        crsTransform='[' + ','.join(list(map(str, tcorr_geo))) + ']',
        dimensions='{0}x{1}'.format(*tcorr_shape),
    )

    logging.debug('  Starting export task')
    utils.ee_task_start(task)

    # Pause before starting the next export task
    utils.delay_task(delay_time, max_ready)
    logging.debug('')
def main(ini_path=None, overwrite_flag=False, delay_time=0, gee_key_file=None,
         max_ready=-1, cron_flag=False, reverse_flag=False):
    """Compute daily Tcorr images

    Parameters
    ----------
    ini_path : str
        Input file path.
    overwrite_flag : bool, optional
        If True, overwrite existing files if the export dates are the same and
        generate new images (but with different export dates) even if the tile
        lists are the same.  The default is False.
    delay_time : float, optional
        Delay time in seconds between starting export tasks (or checking the
        number of queued tasks, see "max_ready" parameter).  The default is 0.
    gee_key_file : str, None, optional
        Earth Engine service account JSON key file (the default is None).
    max_ready: int, optional
        Maximum number of queued "READY" tasks.  The default is -1, which
        implies no limit to the number of tasks that will be submitted.
    cron_flag : bool, optional
        If True, only compute the Tcorr daily image if the existing image does
        not include all available images (checked via the 'wrs2_tiles'
        property), and limit the date range to the last 64 days (~2 months).
    reverse_flag : bool, optional
        If True, process dates in reverse order.
    """
    logging.info('\nCompute daily Tcorr images')

    ini = utils.read_ini(ini_path)

    model_name = 'SSEBOP'
    # model_name = ini['INPUTS']['et_model'].upper()

    tmax_name = ini[model_name]['tmax_source']

    export_id_fmt = 'tcorr_image_{product}_{date}_{export}'
    asset_id_fmt = '{coll_id}/{date}_{export}'

    tcorr_daily_coll_id = '{}/{}_daily'.format(
        ini['EXPORT']['export_coll'], tmax_name.lower())

    if (tmax_name.upper() == 'CIMIS' and
            ini['INPUTS']['end_date'] < '2003-10-01'):
        logging.error(
            '\nCIMIS is not currently available before 2003-10-01, exiting\n')
        sys.exit()
    elif (tmax_name.upper() == 'DAYMET' and
            ini['INPUTS']['end_date'] > '2018-12-31'):
        logging.warning(
            '\nDAYMET is not currently available past 2018-12-31, '
            'using median Tmax values\n')
        # sys.exit()
    # elif (tmax_name.upper() == 'TOPOWX' and
    #         ini['INPUTS']['end_date'] > '2017-12-31'):
    #     logging.warning(
    #         '\nDAYMET is not currently available past 2017-12-31, '
    #         'using median Tmax values\n')
    #     # sys.exit()

    # Extract the model keyword arguments from the INI
    # Set the property name to lower case and try to cast values to numbers
    model_args = {
        k.lower(): float(v) if utils.is_number(v) else v
        for k, v in dict(ini[model_name]).items()}
    # et_reference_args = {
    #     k: model_args.pop(k)
    #     for k in [k for k in model_args.keys() if k.startswith('et_reference_')]}


    logging.info('\nInitializing Earth Engine')
    if gee_key_file:
        logging.info('  Using service account key file: {}'.format(gee_key_file))
        # The "EE_ACCOUNT" parameter is not used if the key file is valid
        ee.Initialize(ee.ServiceAccountCredentials('x', key_file=gee_key_file),
                      use_cloud_api=True)
    else:
        ee.Initialize(use_cloud_api=True)

    # Get a Tmax image to set the Tcorr values to
    logging.debug('\nTmax properties')
    tmax_source = tmax_name.split('_', 1)[0]
    tmax_version = tmax_name.split('_', 1)[1]
    if 'MEDIAN' in tmax_name.upper():
        tmax_coll_id = 'projects/earthengine-legacy/assets/' \
                       'projects/usgs-ssebop/tmax/{}'.format(tmax_name.lower())
        tmax_coll = ee.ImageCollection(tmax_coll_id)
        tmax_mask = ee.Image(tmax_coll.first()).select([0]).multiply(0)
    else:
        # TODO: Add support for non-median tmax sources
        raise ValueError('unsupported tmax_source: {}'.format(tmax_name))
    logging.debug('  Collection: {}'.format(tmax_coll_id))
    logging.debug('  Source:  {}'.format(tmax_source))
    logging.debug('  Version: {}'.format(tmax_version))

    logging.debug('\nExport properties')
    export_info = utils.get_info(ee.Image(tmax_mask))
    if 'daymet' in tmax_name.lower():
        # Custom smaller extent for DAYMET focused on CONUS
        export_extent = [-1999750, -1890500, 2500250, 1109500]
        export_shape = [4500, 3000]
        export_geo = [1000, 0, -1999750, 0, -1000, 1109500]
        # Custom medium extent for DAYMET of CONUS, Mexico, and southern Canada
        # export_extent = [-2099750, -3090500, 2900250, 1909500]
        # export_shape = [5000, 5000]
        # export_geo = [1000, 0, -2099750, 0, -1000, 1909500]
        export_crs = export_info['bands'][0]['crs']
    else:
        export_crs = export_info['bands'][0]['crs']
        export_geo = export_info['bands'][0]['crs_transform']
        export_shape = export_info['bands'][0]['dimensions']
        # export_geo = ee.Image(tmax_mask).projection().getInfo()['transform']
        # export_crs = ee.Image(tmax_mask).projection().getInfo()['crs']
        # export_shape = ee.Image(tmax_mask).getInfo()['bands'][0]['dimensions']
        export_extent = [
            export_geo[2], export_geo[5] + export_shape[1] * export_geo[4],
            export_geo[2] + export_shape[0] * export_geo[0], export_geo[5]]
    logging.debug('  CRS: {}'.format(export_crs))
    logging.debug('  Extent: {}'.format(export_extent))
    logging.debug('  Geo: {}'.format(export_geo))
    logging.debug('  Shape: {}'.format(export_shape))


    # This extent will limit the WRS2 tiles that are included
    # This is needed especially for non-median DAYMET Tmax since the default
    #   extent is huge but we are only processing a subset
    if 'daymet' in tmax_name.lower():
        export_geom = ee.Geometry.Rectangle(
            [-125, 25, -65, 53], proj='EPSG:4326', geodesic=False)
        # export_geom = ee.Geometry.Rectangle(
        #     [-135, 15, -55, 60], proj='EPSG:4326', geodesic=False)
    elif 'cimis' in tmax_name.lower():
        export_geom = ee.Geometry.Rectangle(
            [-124, 35, -119, 42], proj='EPSG:4326', geodesic=False)
    else:
        export_geom = tmax_mask.geometry()


    # If cell_size parameter is set in the INI,
    # adjust the output cellsize and recompute the transform and shape
    try:
        export_cs = float(ini['EXPORT']['cell_size'])
        export_shape = [
            int(math.ceil(abs((export_shape[0] * export_geo[0]) / export_cs))),
            int(math.ceil(abs((export_shape[1] * export_geo[4]) / export_cs)))]
        export_geo = [export_cs, 0.0, export_geo[2], 0.0, -export_cs, export_geo[5]]
        logging.debug('  Custom export cell size: {}'.format(export_cs))
        logging.debug('  Geo: {}'.format(export_geo))
        logging.debug('  Shape: {}'.format(export_shape))
    except KeyError:
        pass

    if not ee.data.getInfo(tcorr_daily_coll_id):
        logging.info('\nExport collection does not exist and will be built'
                     '\n  {}'.format(tcorr_daily_coll_id))
        input('Press ENTER to continue')
        ee.data.createAsset({'type': 'IMAGE_COLLECTION'}, tcorr_daily_coll_id)

    # Get current asset list
    logging.debug('\nGetting GEE asset list')
    asset_list = utils.get_ee_assets(tcorr_daily_coll_id)
    if logging.getLogger().getEffectiveLevel() == logging.DEBUG:
        pprint.pprint(asset_list[:10])

    # Get current running tasks
    tasks = utils.get_ee_tasks()
    if logging.getLogger().getEffectiveLevel() == logging.DEBUG:
        logging.debug('  Tasks: {}\n'.format(len(tasks)))
        input('ENTER')


    collections = [x.strip() for x in ini['INPUTS']['collections'].split(',')]

    # Limit by year and month
    try:
        month_list = sorted(list(utils.parse_int_set(ini['TCORR']['months'])))
    except (KeyError, ValueError):
        logging.info('\nTCORR "months" parameter not set in the INI,'
                     '\n  Defaulting to all months (1-12)\n')
        month_list = list(range(1, 13))
    try:
        year_list = sorted(list(utils.parse_int_set(ini['TCORR']['years'])))
    except (KeyError, ValueError):
        logging.info('\nTCORR "years" parameter not set in the INI,'
                     '\n  Defaulting to all available years\n')
        year_list = []
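    # Example (assuming utils.parse_int_set accepts ranges like '1-7,9'):
    #   months = 1-7,9  ->  month_list = [1, 2, 3, 4, 5, 6, 7, 9]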

    # Key is cycle day, value is a reference date on that cycle
    # Data from: https://landsat.usgs.gov/landsat_acq
    # I only need to use 8 cycle days because the 5/7 and 7/8 overpasses are offset
    cycle_dates = {
        7: '1970-01-01',
        8: '1970-01-02',
        1: '1970-01-03',
        2: '1970-01-04',
        3: '1970-01-05',
        4: '1970-01-06',
        5: '1970-01-07',
        6: '1970-01-08',
    }
    # cycle_dates = {
    #     1:  '2000-01-06',
    #     2:  '2000-01-07',
    #     3:  '2000-01-08',
    #     4:  '2000-01-09',
    #     5:  '2000-01-10',
    #     6:  '2000-01-11',
    #     7:  '2000-01-12',
    #     8:  '2000-01-13',
    #     # 9:  '2000-01-14',
    #     # 10: '2000-01-15',
    #     # 11: '2000-01-16',
    #     # 12: '2000-01-01',
    #     # 13: '2000-01-02',
    #     # 14: '2000-01-03',
    #     # 15: '2000-01-04',
    #     # 16: '2000-01-05',
    # }
    cycle_base_dt = datetime.datetime.strptime(cycle_dates[1], '%Y-%m-%d')
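    # Example: 1970-01-05 is 2 days after the cycle base date 1970-01-03,
    #   so ((dt - cycle_base_dt).days % 8) + 1 = 3, matching cycle day 3 above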

    if cron_flag:
        # CGM - This seems like a silly way of getting the date as a datetime
        #   Why am I doing this and not using the commented out line?
        iter_end_dt = datetime.date.today().strftime('%Y-%m-%d')
        iter_end_dt = datetime.datetime.strptime(iter_end_dt, '%Y-%m-%d')
        iter_end_dt = iter_end_dt + datetime.timedelta(days=-4)
        # iter_end_dt = datetime.datetime.today() + datetime.timedelta(days=-1)
        iter_start_dt = iter_end_dt + datetime.timedelta(days=-64)
    else:
        iter_start_dt = datetime.datetime.strptime(
            ini['INPUTS']['start_date'], '%Y-%m-%d')
        iter_end_dt = datetime.datetime.strptime(
            ini['INPUTS']['end_date'], '%Y-%m-%d')
    logging.debug('Start Date: {}'.format(iter_start_dt.strftime('%Y-%m-%d')))
    logging.debug('End Date:   {}\n'.format(iter_end_dt.strftime('%Y-%m-%d')))


    for export_dt in sorted(utils.date_range(iter_start_dt, iter_end_dt),
                            reverse=reverse_flag):
        export_date = export_dt.strftime('%Y-%m-%d')
        next_date = (export_dt + datetime.timedelta(days=1)).strftime('%Y-%m-%d')
        if month_list and export_dt.month not in month_list:
            logging.debug(f'Date: {export_date} - month not in INI - skipping')
            continue
        elif year_list and export_dt.year not in year_list:
            logging.debug(f'Date: {export_date} - year not in INI - skipping')
            continue
        elif export_date >= datetime.datetime.today().strftime('%Y-%m-%d'):
            logging.debug(f'Date: {export_date} - unsupported date - skipping')
            continue
        elif export_date < '1984-03-23':
            logging.debug(f'Date: {export_date} - no Landsat 5+ images before '
                          '1984-03-23 - skipping')
            continue
        logging.info(f'Date: {export_date}')

        export_id = export_id_fmt.format(
            product=tmax_name.lower(),
            date=export_dt.strftime('%Y%m%d'),
            export=datetime.datetime.today().strftime('%Y%m%d'))
        logging.debug('  Export ID: {}'.format(export_id))

        asset_id = asset_id_fmt.format(
            coll_id=tcorr_daily_coll_id,
            date=export_dt.strftime('%Y%m%d'),
            export=datetime.datetime.today().strftime('%Y%m%d'))
        logging.debug('  Asset ID: {}'.format(asset_id))

        if overwrite_flag:
            if export_id in tasks.keys():
                logging.debug('  Task already submitted, cancelling')
                ee.data.cancelTask(tasks[export_id]['id'])
            # This is intentionally not an "elif" so that a task can be
            # cancelled and an existing image/file/asset can be removed
            if asset_id in asset_list:
                logging.debug('  Asset already exists, removing')
                ee.data.deleteAsset(asset_id)
        else:
            if export_id in tasks.keys():
                logging.debug('  Task already submitted, exiting')
                continue
            elif asset_id in asset_list:
                logging.debug('  Asset already exists, skipping')
                continue

        # Build and merge the Landsat collections
        model_obj = ssebop.Collection(
            collections=collections,
            start_date=export_dt.strftime('%Y-%m-%d'),
            end_date=(export_dt + datetime.timedelta(days=1)).strftime(
                '%Y-%m-%d'),
            cloud_cover_max=float(ini['INPUTS']['cloud_cover']),
            geometry=export_geom,
            model_args=model_args,
            # filter_args=filter_args,
        )
        landsat_coll = model_obj.overpass(variables=['ndvi'])
        # wrs2_tiles_all = model_obj.get_image_ids()
        # pprint.pprint(landsat_coll.aggregate_array('system:id').getInfo())
        # input('ENTER')

        logging.debug('  Getting available WRS2 tile list')
        landsat_id_list = utils.get_info(landsat_coll.aggregate_array('system:id'))
        if not landsat_id_list:
            logging.info('  No available images - skipping')
            continue
        wrs2_tiles_all = {image_id.split('_')[-2] for image_id in landsat_id_list}
        # print(wrs2_tiles_all)
        # print('\n')

        def tile_set_2_str(tiles):
            """Trying to build a more compact version of the WRS2 tile list"""
            tile_dict = defaultdict(list)
            for tile in tiles:
                tile_dict[int(tile[:3])].append(int(tile[3:]))
            tile_dict = {k: sorted(v) for k, v in tile_dict.items()}
            tile_str = json.dumps(tile_dict, sort_keys=True) \
                .replace('"', '').replace(' ', '')\
                .replace('{', '').replace('}', '')
            return tile_str
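        # Example: {'042031', '042032', '043030'} -> '42:[31,32],43:[30]'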
        wrs2_tiles_all_str = tile_set_2_str(wrs2_tiles_all)
        # pprint.pprint(wrs2_tiles_all_str)
        # print('\n')

        def tile_str_2_set(tile_str):
            # tile_dict = eval(tile_str)

            tile_set = set()
            for t in tile_str.replace('[', '').split('],'):
                path = int(t.split(':')[0])
                for row in t.split(':')[1].replace(']', '').split(','):
                    tile_set.add('{:03d}{:03d}'.format(path, int(row)))
            return tile_set
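        # Example (inverse of tile_set_2_str above):
        #   '42:[31,32],43:[30]' -> {'042031', '042032', '043030'}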
        # wrs2_tiles_all_dict = tile_str_2_set(wrs2_tiles_all_str)
        # pprint.pprint(wrs2_tiles_all_dict)


        # If overwriting, start a new export no matter what
        # The default is to no overwrite, so this mode will not be used often
        if not overwrite_flag:
            # Check if there are any previous images for this date
            # If so, only build a new Tcorr image if there are new wrs2_tiles
            #   that were not used in the previous image.
            # Should this code only be run in cron mode or is this the expected
            #   operation when (re)running for any date range?
            # Should we only test the last image
            # or all previous images for the date?
            logging.debug('  Checking for previous exports/versions of daily image')
            tcorr_daily_coll = ee.ImageCollection(tcorr_daily_coll_id)\
                .filterDate(export_date, next_date)\
                .limit(1, 'date_ingested', False)
            tcorr_daily_info = utils.get_info(tcorr_daily_coll)
            # pprint.pprint(tcorr_daily_info)
            # input('ENTER')

            if tcorr_daily_info['features']:
                # Assume we won't be building a new image and only set flag
                #   to True if the WRS2 tile lists are different
                export_flag = False

                # The ".limit(1, ..." on the tcorr_daily_coll above makes this
                # for loop and break statement unnecessary, but leaving for now
                for tcorr_img in tcorr_daily_info['features']:
                    # If the full WRS2 list is not present, rebuild the image
                    # This should only happen for much older Tcorr images
                    if 'wrs2_available' not in tcorr_img['properties'].keys():
                        logging.debug(
                            '    "wrs2_available" property not present in '
                            'previous export')
                        export_flag = True
                        break

                    # DEADBEEF - The wrs2_available property is now a string
                    # wrs2_tiles_old = set(tcorr_img['properties']['wrs2_available'].split(','))

                    # Convert available dict str to a list of path/rows
                    wrs2_tiles_old_str = tcorr_img['properties']['wrs2_available']
                    wrs2_tiles_old = tile_str_2_set(wrs2_tiles_old_str)

                    if wrs2_tiles_all != wrs2_tiles_old:
                        logging.debug('  Tile Lists')
                        logging.debug('  Previous: {}'.format(', '.join(
                            sorted(wrs2_tiles_old))))
                        logging.debug('  Available: {}'.format(', '.join(
                            sorted(wrs2_tiles_all))))
                        logging.debug('  New: {}'.format(', '.join(
                            sorted(wrs2_tiles_all.difference(wrs2_tiles_old)))))
                        logging.debug('  Dropped: {}'.format(', '.join(
                            sorted(wrs2_tiles_old.difference(wrs2_tiles_all)))))

                        export_flag = True
                        break

                if not export_flag:
                    logging.debug('  No new WRS2 tiles/images - skipping')
                    continue
                # else:
                #     logging.debug('    Building new version')
            else:
                logging.debug('    No previous exports')

        def tcorr_img_func(image):
            t_obj = ssebop.Image.from_landsat_c1_toa(
                ee.Image(image), **model_args)
            t_stats = ee.Dictionary(t_obj.tcorr_stats) \
                .combine({'tcorr_p5': 0, 'tcorr_count': 0}, overwrite=False)
            tcorr = ee.Number(t_stats.get('tcorr_p5'))
            count = ee.Number(t_stats.get('tcorr_count'))

            # Remove the merged collection indices from the system:index
            scene_id = ee.List(
                ee.String(image.get('system:index')).split('_')).slice(-3)
            scene_id = ee.String(scene_id.get(0)).cat('_') \
                .cat(ee.String(scene_id.get(1))).cat('_') \
                .cat(ee.String(scene_id.get(2)))

            return tmax_mask.add(tcorr) \
                .rename(['tcorr']) \
                .clip(image.geometry()) \
                .set({
                    'system:time_start': image.get('system:time_start'),
                    'scene_id': scene_id,
                    'wrs2_path': ee.Number.parse(scene_id.slice(5, 8)),
                    'wrs2_row': ee.Number.parse(scene_id.slice(8, 11)),
                    'wrs2_tile': scene_id.slice(5, 11),
                    'spacecraft_id': image.get('SPACECRAFT_ID'),
                    'tcorr': tcorr,
                    'count': count,
                })
        # Test for one image
        # pprint.pprint(tcorr_img_func(ee.Image(landsat_coll \
        #     .filterMetadata('WRS_PATH', 'equals', 36) \
        #     .filterMetadata('WRS_ROW', 'equals', 33).first())).getInfo())
        # input('ENTER')

        # (Re)build the Landsat collection from the image IDs
        landsat_coll = ee.ImageCollection(landsat_id_list)
        tcorr_img_coll = ee.ImageCollection(landsat_coll.map(tcorr_img_func)) \
            .filterMetadata('count', 'not_less_than',
                            float(ini['TCORR']['min_pixel_count']))

        # If there are no Tcorr values, return an empty image
        tcorr_img = ee.Algorithms.If(
            tcorr_img_coll.size().gt(0),
            tcorr_img_coll.median(),
            tmax_mask.updateMask(0))


        # Build the tile list as a string of a dictionary of paths and rows
        def tile_dict(path):
            # Get the row list for each path
            rows = tcorr_img_coll\
                .filterMetadata('wrs2_path', 'equals', path)\
                .aggregate_array('wrs2_row')
            # Convert rows to integers (otherwise they come back as floats)
            rows = ee.List(rows).sort().map(lambda row: ee.Number(row).int())
            return ee.Number(path).format('%d').cat(':[')\
                .cat(ee.List(rows).join(',')).cat(']')
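        # Example: path 42 with rows [31, 32] formats to the string '42:[31,32]'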

        path_list = ee.List(tcorr_img_coll.aggregate_array('wrs2_path'))\
            .distinct().sort()
        wrs2_tile_str = ee.List(path_list.map(tile_dict)).join(',')
        # pprint.pprint(wrs2_tile_str.getInfo())
        # input('ENTER')

        # # DEADBEEF - This works but is really slow because of the getInfo
        # logging.debug('  Getting Tcorr collection tile list')
        # wrs2_tile_list = utils.get_info(
        #     tcorr_img_coll.aggregate_array('wrs2_tile'))
        # wrs2_tile_str = tile_set_2_str(wrs2_tile_list)
        # pprint.pprint(wrs2_tile_list)
        # pprint.pprint(wrs2_tile_str)
        # input('ENTER')

        # DEADBEEF - Old approach, tile lists for big areas are too long
        # def unique_properties(coll, property):
        #     return ee.String(ee.List(ee.Dictionary(
        #         coll.aggregate_histogram(property)).keys()).join(','))
        # wrs2_tile_list = ee.String('').cat(unique_properties(
        #     tcorr_img_coll, 'wrs2_tile'))
        # wrs2_tile_list = set([id.split('_')[-2] for id in wrs2_tile_list])


        def unique_properties(coll, property):
            return ee.String(ee.List(ee.Dictionary(
                coll.aggregate_histogram(property)).keys()).join(','))
        landsat_list = ee.String('').cat(unique_properties(
            tcorr_img_coll, 'spacecraft_id'))
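        # e.g. landsat_list is 'LANDSAT_7,LANDSAT_8' when both spacecraft
        #   contribute scenes to the Tcorr collection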


        # Cast to float and set properties
        tcorr_img = ee.Image(tcorr_img).rename(['tcorr']).double() \
            .set({
                'system:time_start': utils.millis(export_dt),
                'date_ingested': datetime.datetime.today().strftime('%Y-%m-%d'),
                'date': export_dt.strftime('%Y-%m-%d'),
                'year': int(export_dt.year),
                'month': int(export_dt.month),
                'day': int(export_dt.day),
                'doy': int(export_dt.strftime('%j')),
                'cycle_day': ((export_dt - cycle_base_dt).days % 8) + 1,
                'landsat': landsat_list,
                'model_name': model_name,
                'model_version': ssebop.__version__,
                'tmax_source': tmax_source.upper(),
                'tmax_version': tmax_version.upper(),
                'wrs2_tiles': wrs2_tile_str,
                'wrs2_available': wrs2_tiles_all_str,
            })
        # pprint.pprint(tcorr_img.getInfo()['properties'])
        # input('ENTER')

        logging.debug('  Building export task')
        task = ee.batch.Export.image.toAsset(
            image=ee.Image(tcorr_img),
            description=export_id,
            assetId=asset_id,
            crs=export_crs,
            crsTransform='[' + ','.join(list(map(str, export_geo))) + ']',
            dimensions='{0}x{1}'.format(*export_shape),
        )

        logging.info('  Starting export task')
        utils.ee_task_start(task)

        # Pause before starting the next export task
        utils.delay_task(delay_time, max_ready)
        logging.debug('')
def main(ini_path=None, overwrite_flag=False, delay=0, key=None):
    """Test for differences in Tcorr from real-time and Collection 1

    Parameters
    ----------
    ini_path : str
        Input file path.
    overwrite_flag : bool, optional
        If True, overwrite existing files (the default is False).
    delay : float, optional
        Delay time between each export task (the default is 0).
    key : str, optional
        File path to an Earth Engine json key file (the default is None).

    """
    logging.info('\nTest Real Time Tcorr')

    # Hardcoding for now...
    tcorr_stats_path = r'C:\Users\mortonc\Google Drive\SSEBop\tcorr_realtime\tcorr_stats.csv'
    # tcorr_stats_path = r'C:\Projects\openet-ssebop\tcorr\tcorr_stats.csv'

    ini = utils.read_ini(ini_path)

    model_name = 'SSEBOP'
    # model_name = ini['INPUTS']['et_model'].upper()

    logging.info('\nInitializing Earth Engine')
    if key:
        logging.info('  Using service account key file: {}'.format(key))
        # The "EE_ACCOUNT" parameter is not used if the key file is valid
        ee.Initialize(ee.ServiceAccountCredentials('deadbeef', key_file=key))
    else:
        ee.Initialize()

    # Get a Tmax image to set the Tcorr values to
    logging.debug('\nTmax properties')
    tmax_name = ini[model_name]['tmax_source']
    tmax_source = tmax_name.split('_', 1)[0]
    tmax_version = tmax_name.split('_', 1)[1]
    tmax_coll_id = 'projects/usgs-ssebop/tmax/{}'.format(tmax_name.lower())
    tmax_coll = ee.ImageCollection(tmax_coll_id)
    tmax_mask = ee.Image(tmax_coll.first()).select([0]).multiply(0)
    logging.debug('  Collection: {}'.format(tmax_coll_id))
    logging.debug('  Source:  {}'.format(tmax_source))
    logging.debug('  Version: {}'.format(tmax_version))

    if not os.path.isfile(tcorr_stats_path):
        logging.debug('\nBuilding new Tcorr dataframe')
        tcorr_df = pd.DataFrame(columns=[
            'IMAGE_ID', 'IMAGE_DATE', 'COLLECTION', 'TCORR', 'COUNT',
            'EXPORT_DATE'
        ])
        c1_id_set = set()
        rt_id_set = set()
    else:
        logging.debug('\nLoading existing Tcorr dataframe')
        logging.debug('  {}'.format(tcorr_stats_path))
        tcorr_df = pd.read_csv(tcorr_stats_path)
        c1_id_set = set(tcorr_df.loc[tcorr_df['COLLECTION'] == 'C1',
                                     'IMAGE_ID'])
        rt_id_set = set(tcorr_df.loc[tcorr_df['COLLECTION'] == 'RT',
                                     'IMAGE_ID'])
        logging.debug(tcorr_df.head())

    # CGM - This seems like a silly way of getting the date as a datetime
    iter_end_dt = datetime.date.today().strftime('%Y-%m-%d')
    iter_end_dt = datetime.datetime.strptime(iter_end_dt, '%Y-%m-%d')
    iter_end_dt = iter_end_dt + datetime.timedelta(days=-1)
    # iter_end_dt = datetime.datetime.today() + datetime.timedelta(days=-1)
    iter_start_dt = iter_end_dt + datetime.timedelta(days=-64)
    logging.debug('Start Date: {}'.format(iter_start_dt.strftime('%Y-%m-%d')))
    logging.debug('End Date:   {}\n'.format(iter_end_dt.strftime('%Y-%m-%d')))

    # Iterate over date ranges
    for iter_dt in reversed(list(utils.date_range(iter_start_dt,
                                                  iter_end_dt))):
        logging.info('Date: {}'.format(iter_dt.strftime('%Y-%m-%d')))

        # Build and merge the Real-Time Landsat collections
        l8_rt_coll = ee.ImageCollection('LANDSAT/LC08/C01/T1_RT_TOA') \
            .filterDate(iter_dt, iter_dt + datetime.timedelta(days=1)) \
            .filterBounds(tmax_mask.geometry()) \
            .filterMetadata('CLOUD_COVER_LAND', 'less_than',
                            float(ini['INPUTS']['cloud_cover'])) \
            .filterMetadata('DATA_TYPE', 'equals', 'L1TP')
        l7_rt_coll = ee.ImageCollection('LANDSAT/LE07/C01/T1_RT_TOA') \
            .filterDate(iter_dt, iter_dt + datetime.timedelta(days=1)) \
            .filterBounds(tmax_mask.geometry()) \
            .filterMetadata('CLOUD_COVER_LAND', 'less_than',
                            float(ini['INPUTS']['cloud_cover'])) \
            .filterMetadata('DATA_TYPE', 'equals', 'L1TP')
        rt_coll = ee.ImageCollection(l8_rt_coll.merge(l7_rt_coll))

        # Build and merge the final Collection 1 collections
        l8_c1_coll = ee.ImageCollection('LANDSAT/LC08/C01/T1_TOA') \
            .filterDate(iter_dt, iter_dt + datetime.timedelta(days=1)) \
            .filterBounds(tmax_mask.geometry()) \
            .filterMetadata('CLOUD_COVER_LAND', 'less_than',
                            float(ini['INPUTS']['cloud_cover'])) \
            .filterMetadata('DATA_TYPE', 'equals', 'L1TP')
        l7_c1_coll = ee.ImageCollection('LANDSAT/LE07/C01/T1_TOA') \
            .filterDate(iter_dt, iter_dt + datetime.timedelta(days=1)) \
            .filterBounds(tmax_mask.geometry()) \
            .filterMetadata('CLOUD_COVER_LAND', 'less_than',
                            float(ini['INPUTS']['cloud_cover'])) \
            .filterMetadata('DATA_TYPE', 'equals', 'L1TP')
        c1_coll = ee.ImageCollection(l8_c1_coll.merge(l7_c1_coll))

        # Get the Image IDs that haven't been processed
        logging.info('  Getting Missing Asset IDs')
        rt_id_list = [
            image_id for image_id in rt_coll.aggregate_array('system:id').getInfo()
            if image_id.split('/')[-1] not in rt_id_set
        ]
        c1_id_list = [
            image_id for image_id in c1_coll.aggregate_array('system:id').getInfo()
            if image_id.split('/')[-1] not in c1_id_set
        ]

        if not rt_id_list and not c1_id_list:
            logging.info('  No new images, skipping date')
            continue

        logging.info('  Real-time')
        for asset_id in rt_id_list:
            logging.info('  {}'.format(asset_id))
            t_stats = ssebop.Image.from_landsat_c1_toa(ee.Image(asset_id))\
                .tcorr_stats\
                .getInfo()
            if t_stats['tcorr_value'] is None:
                t_stats['tcorr_value'] = ''
            image_id = asset_id.split('/')[-1]
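            # NOTE: DataFrame.append was removed in pandas 2.0; on newer
            #   pandas, build the row and use pd.concat([tcorr_df, row_df])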
            tcorr_df = tcorr_df.append(
                {
                    'IMAGE_ID': image_id,
                    'IMAGE_DATE': datetime.datetime.strptime(
                        image_id.split('_')[2], '%Y%m%d').strftime('%Y-%m-%d'),
                    'COLLECTION': 'RT',
                    'TCORR': t_stats['tcorr_value'],
                    'COUNT': t_stats['tcorr_count'],
                    'EXPORT_DATE': datetime.datetime.today().strftime('%Y-%m-%d'),
                },
                ignore_index=True)

        logging.info('  Collection 1')
        for asset_id in c1_id_list:
            logging.info('  {}'.format(asset_id))
            t_stats = ssebop.Image.from_landsat_c1_toa(ee.Image(asset_id))\
                .tcorr_stats\
                .getInfo()
            if t_stats['tcorr_value'] is None:
                t_stats['tcorr_value'] = ''
            image_id = asset_id.split('/')[-1]
            tcorr_df = tcorr_df.append(
                {
                    'IMAGE_ID': image_id,
                    'IMAGE_DATE': datetime.datetime.strptime(
                        image_id.split('_')[2], '%Y%m%d').strftime('%Y-%m-%d'),
                    'COLLECTION': 'C1',
                    'TCORR': t_stats['tcorr_value'],
                    'COUNT': t_stats['tcorr_count'],
                    'EXPORT_DATE': datetime.datetime.today().strftime('%Y-%m-%d'),
                },
                ignore_index=True)

        # Export the current dataframe to disk
        logging.info('  Writing CSV')
        tcorr_df.sort_values(by=['IMAGE_ID', 'COLLECTION'], inplace=True)
        # tcorr_df.sort_values(by=['COLLECTION', 'IMAGE_ID'], inplace=True)
        tcorr_df.to_csv(tcorr_stats_path, index=False)
def main(ini_path=None):
    """Remove earlier versions of daily tcorr images

    Parameters
    ----------
    ini_path : str
        Input file path.

    """
    logging.info('\nRemove earlier versions of daily tcorr images')

    ini = utils.read_ini(ini_path)

    model_name = 'SSEBOP'
    # model_name = ini['INPUTS']['et_model'].upper()

    start_dt = datetime.datetime.strptime(
        ini['INPUTS']['start_date'], '%Y-%m-%d')
    end_dt = datetime.datetime.strptime(
        ini['INPUTS']['end_date'], '%Y-%m-%d')
    logging.debug('Start Date: {}'.format(start_dt.strftime('%Y-%m-%d')))
    logging.debug('End Date:   {}\n'.format(end_dt.strftime('%Y-%m-%d')))

    tcorr_source = 'IMAGE'

    try:
        tmax_source = str(ini[model_name]['tmax_source']).upper()
        logging.debug('\ntmax_source:\n  {}'.format(tmax_source))
    except KeyError:
        logging.error('  tmax_source: must be set in INI')
        sys.exit()

    # This check is limited to TOPOWX_MEDIAN_V0 because Tcorr images have only
    #   been built for that dataset
    if tmax_source.upper() not in ['TOPOWX_MEDIAN_V0']:
        raise ValueError('tmax_source must be TOPOWX_MEDIAN_V0')

    if (tmax_source.upper() == 'CIMIS' and
            ini['INPUTS']['end_date'] < '2003-10-01'):
        logging.error(
            '\nCIMIS is not currently available before 2003-10-01, exiting\n')
        sys.exit()
    elif (tmax_source.upper() == 'DAYMET' and
            ini['INPUTS']['end_date'] > '2017-12-31'):
        logging.warning(
            '\nDAYMET is not currently available past 2017-12-31, '
            'using median Tmax values\n')

    # Output tcorr daily image collection
    tcorr_daily_coll_id = '{}/{}_daily'.format(
        ini['EXPORT']['export_coll'], tmax_source.lower())
    logging.debug('  {}'.format(tcorr_daily_coll_id))


    if os.name == 'posix':
        shell_flag = False
    else:
        shell_flag = True


    logging.info('\nInitializing Earth Engine')
    ee.Initialize()
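    # Trivial getInfo call, presumably to confirm the EE session is working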
    utils.get_info(ee.Number(1))


    # Get list of existing images/files
    logging.debug('\nGetting GEE asset list')
    asset_list = utils.get_ee_assets(tcorr_daily_coll_id, shell_flag=shell_flag)
    logging.debug('Displaying first 10 images in collection')
    logging.debug(asset_list[:10])


    # Filter asset list by INI start_date and end_date
    logging.debug('\nFiltering by INI start_date and end_date')
    asset_re = re.compile(r'(\d{8})_\d{8}')

    def asset_date(asset_id):
        # Parse the image date (first 8 digits) from the asset ID
        return datetime.datetime.strptime(
            asset_re.findall(asset_id.split('/')[-1])[0], '%Y%m%d')

    asset_list = [
        asset_id for asset_id in asset_list
        if (asset_re.match(asset_id.split('/')[-1]) and
            start_dt <= asset_date(asset_id) <= end_dt)]
    if not asset_list:
        logging.info('Empty asset ID list after filter by start/end date, '
                     'exiting')
        return True
    logging.debug('Displaying first 10 images in collection')
    logging.debug(asset_list[:10])


    # Group asset IDs by image date
    asset_id_dict = defaultdict(list)
    for asset_id in asset_list:
        asset_dt = datetime.datetime.strptime(
            asset_id.split('/')[-1].split('_')[0], '%Y%m%d')
        asset_id_dict[asset_dt.strftime('%Y-%m-%d')].append(asset_id)
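    # e.g. assets '20180101_20180401' and '20180101_20180405' both group
    #   under the image date key '2018-01-01'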
    # pprint.pprint(asset_id_dict)


    # Remove all but the last image when sorted by export date
    logging.info('\nRemoving assets')
    for key, asset_list in asset_id_dict.items():
        # logging.debug('{}'.format(key))
        if len(asset_list) >= 2:
            # logging.debug('\n  Keep: {}'.format(sorted(asset_list)[-1]))
            for asset_id in sorted(asset_list)[:-1]:
                logging.info('  Delete: {}'.format(asset_id))
                try:
                    ee.data.deleteAsset(asset_id)
                except Exception as e:
                    logging.info('  Unhandled exception, skipping')
                    logging.debug(e)
                    continue
Exemple #13
0
def main(ini_path=None,
         overwrite_flag=False,
         delay=0,
         key=None,
         cron_flag=False,
         reverse_flag=False):
    """Compute daily dT images

    Parameters
    ----------
    ini_path : str
        Input file path.
    overwrite_flag : bool, optional
        If True, generate new images (but with different export dates) even if
        the dates already have images.  If False, only generate images for
        dates that are missing. The default is False.
    delay : float, optional
        Delay time between each export task (the default is 0).
    key : str, optional
        File path to an Earth Engine json key file (the default is None).
    reverse_flag : bool, optional
        If True, process dates in reverse order.

    """
    logging.info('\nCompute daily dT images')

    ini = utils.read_ini(ini_path)

    model_name = 'SSEBOP'
    # model_name = ini['INPUTS']['et_model'].upper()

    if ini[model_name]['dt_source'].upper() == 'CIMIS':
        daily_coll_id = 'projects/climate-engine/cimis/daily'
    elif ini[model_name]['dt_source'].upper() == 'DAYMET':
        daily_coll_id = 'NASA/ORNL/DAYMET_V3'
    elif ini[model_name]['dt_source'].upper() == 'GRIDMET':
        daily_coll_id = 'IDAHO_EPSCOR/GRIDMET'
    else:
        raise ValueError('dt_source must be CIMIS, DAYMET, or GRIDMET')

    # Check dates
    if (ini[model_name]['dt_source'].upper() == 'CIMIS'
            and ini['INPUTS']['end_date'] < '2003-10-01'):
        logging.error(
            '\nCIMIS is not currently available before 2003-10-01, exiting\n')
        sys.exit()
    elif (ini[model_name]['dt_source'].upper() == 'DAYMET'
          and ini['INPUTS']['end_date'] > '2017-12-31'):
        logging.warning('\nDAYMET is not currently available past 2017-12-31, '
                        'using median Tmax values\n')
        # sys.exit()
    # elif (ini[model_name]['tmax_source'].upper() == 'TOPOWX' and
    #         ini['INPUTS']['end_date'] > '2017-12-31'):
    #     logging.warning(
    #         '\nDAYMET is not currently available past 2017-12-31, '
    #         'using median Tmax values\n')
    #     # sys.exit()

    logging.info('\nInitializing Earth Engine')
    if key:
        logging.info('  Using service account key file: {}'.format(key))
        # The "EE_ACCOUNT" parameter is not used if the key file is valid
        ee.Initialize(ee.ServiceAccountCredentials('deadbeef', key_file=key))
    else:
        ee.Initialize()

    # Output dT daily image collection
    dt_daily_coll_id = '{}/{}_daily'.format(
        ini['EXPORT']['export_coll'], ini[model_name]['dt_source'].lower())

    # Get an input image to set the dT values to
    logging.debug('\nInput properties')
    dt_name = ini[model_name]['dt_source']
    dt_source = dt_name.split('_', 1)[0]
    # dt_version = dt_name.split('_', 1)[1]
    daily_coll = ee.ImageCollection(daily_coll_id)
    dt_img = ee.Image(daily_coll.first()).select([0])
    dt_mask = dt_img.multiply(0)
    logging.debug('  Collection: {}'.format(daily_coll_id))
    logging.debug('  Source: {}'.format(dt_source))
    # logging.debug('  Version: {}'.format(dt_version))

    logging.debug('\nExport properties')
    export_proj = dt_img.projection().getInfo()
    export_geo = export_proj['transform']
    if 'crs' in export_proj.keys():
        export_crs = export_proj['crs']
    elif 'wkt' in export_proj.keys():
        export_crs = re.sub(r',\s+', ',', export_proj['wkt'])
    export_shape = dt_img.getInfo()['bands'][0]['dimensions']
    export_extent = [
        export_geo[2], export_geo[5] + export_shape[1] * export_geo[4],
        export_geo[2] + export_shape[0] * export_geo[0], export_geo[5]
    ]
    logging.debug('  CRS:    {}'.format(export_crs))
    logging.debug('  Extent: {}'.format(export_extent))
    logging.debug('  Geo:    {}'.format(export_geo))
    logging.debug('  Shape:  {}'.format(export_shape))

    # Get current asset list
    if ini['EXPORT']['export_dest'].upper() == 'ASSET':
        logging.debug('\nGetting asset list')
        # DEADBEEF - daily is hardcoded in the asset_id for now
        asset_list = utils.get_ee_assets(dt_daily_coll_id)
    else:
        raise ValueError('invalid export destination: {}'.format(
            ini['EXPORT']['export_dest']))

    # Get current running tasks
    tasks = utils.get_ee_tasks()
    if logging.getLogger().getEffectiveLevel() == logging.DEBUG:
        logging.debug('  Tasks: {}\n'.format(len(tasks)))
        input('ENTER')

    # Limit by year and month
    try:
        month_list = sorted(list(utils.parse_int_set(ini['INPUTS']['months'])))
    except (KeyError, ValueError):
        logging.info('\nINPUTS "months" parameter not set in the INI,'
                     '\n  Defaulting to all months (1-12)\n')
        month_list = list(range(1, 13))
    # try:
    #     year_list = sorted(list(utils.parse_int_set(ini['INPUTS']['years'])))
    # except:
    #     logging.info('\nINPUTS "years" parameter not set in the INI,'
    #                  '\n  Defaulting to all available years\n')
    #     year_list = []

    # Group asset IDs by image date
    asset_id_dict = defaultdict(list)
    for asset_id in asset_list:
        asset_dt = datetime.datetime.strptime(
            asset_id.split('/')[-1].split('_')[0], '%Y%m%d')
        asset_id_dict[asset_dt.strftime('%Y-%m-%d')].append(asset_id)
    # pprint.pprint(export_dt_dict)

    iter_start_dt = datetime.datetime.strptime(ini['INPUTS']['start_date'],
                                               '%Y-%m-%d')
    iter_end_dt = datetime.datetime.strptime(ini['INPUTS']['end_date'],
                                             '%Y-%m-%d')
    logging.debug('Start Date: {}'.format(iter_start_dt.strftime('%Y-%m-%d')))
    logging.debug('End Date:   {}\n'.format(iter_end_dt.strftime('%Y-%m-%d')))

    for export_dt in sorted(utils.date_range(iter_start_dt, iter_end_dt),
                            reverse=reverse_flag):
        export_date = export_dt.strftime('%Y-%m-%d')

        # if ((month_list and export_dt.month not in month_list) or
        #         (year_list and export_dt.year not in year_list)):
        if month_list and export_dt.month not in month_list:
            logging.debug(f'Date: {export_date} - month not in INI - skipping')
            continue
        elif export_date >= datetime.datetime.today().strftime('%Y-%m-%d'):
            logging.debug(f'Date: {export_date} - unsupported date - skipping')
            continue
        logging.info(f'Date: {export_date}')

        export_id = ini['EXPORT']['export_id_fmt'] \
            .format(
                product=dt_name.lower(),
                date=export_dt.strftime('%Y%m%d'),
                export=datetime.datetime.today().strftime('%Y%m%d'),
                dest=ini['EXPORT']['export_dest'].lower())
        logging.debug('  Export ID: {}'.format(export_id))

        if ini['EXPORT']['export_dest'] == 'ASSET':
            asset_id = '{}/{}_{}'.format(
                dt_daily_coll_id, export_dt.strftime('%Y%m%d'),
                datetime.datetime.today().strftime('%Y%m%d'))
            logging.debug('  Asset ID: {}'.format(asset_id))

        if overwrite_flag:
            if export_id in tasks.keys():
                logging.debug('  Task already submitted, cancelling')
                ee.data.cancelTask(tasks[export_id])
            # This is intentionally not an "elif" so that a task can be
            # cancelled and an existing image/file/asset can be removed
            if (ini['EXPORT']['export_dest'].upper() == 'ASSET'
                    and asset_id in asset_list):
                logging.debug('  Asset already exists, removing')
                ee.data.deleteAsset(asset_id)
        else:
            if export_id in tasks.keys():
                logging.debug('  Task already submitted, exiting')
                continue
            elif (ini['EXPORT']['export_dest'].upper() == 'ASSET'
                  and asset_id in asset_list):
                logging.debug(
                    '  Asset with current export date already exists, '
                    'skipping')
                continue
            elif len(asset_id_dict[export_date]) > 0:
                logging.debug(
                    '  Asset with earlier export date already exists, '
                    'skipping')
                continue

        # Compute dT using a fake Landsat image
        # The system:time_start property is the only needed value
        model_obj = ssebop.Image(
            ee.Image.constant([0, 0]).rename(['ndvi', 'lst']).set({
                'system:time_start':
                utils.millis(export_dt),
                'system:index':
                'LC08_043033_20170716',
                'system:id':
                'LC08_043033_20170716'
            }),
            dt_source=dt_source.upper(),
            elev_source='SRTM',
            dt_min=ini['SSEBOP']['dt_min'],
            dt_max=ini['SSEBOP']['dt_max'],
        )

        # Cast to float and set properties
        dt_img = model_obj.dt.float() \
            .set({
                'system:time_start': utils.millis(export_dt),
                'date_ingested': datetime.datetime.today().strftime('%Y-%m-%d'),
                'date': export_dt.strftime('%Y-%m-%d'),
                'year': int(export_dt.year),
                'month': int(export_dt.month),
                'day': int(export_dt.day),
                'doy': int(export_dt.strftime('%j')),
                'model_name': model_name,
                'model_version': ssebop.__version__,
                'dt_source': dt_source.upper(),
                # 'dt_version': dt_version.upper(),
            })

        # Build export tasks
        if ini['EXPORT']['export_dest'] == 'ASSET':
            logging.debug('  Building export task')
            task = ee.batch.Export.image.toAsset(
                image=ee.Image(dt_img),
                description=export_id,
                assetId=asset_id,
                crs=export_crs,
                crsTransform='[' + ','.join(list(map(str, export_geo))) + ']',
                dimensions='{0}x{1}'.format(*export_shape),
            )
            logging.info('  Starting export task')
            utils.ee_task_start(task)

        # Pause before starting next task
        utils.delay_task(delay)
        logging.debug('')
Example #14
0
def main(ini_path=None, overwrite_flag=False, delay_time=0, gee_key_file=None,
         max_ready=-1, reverse_flag=False):
    """Compute default Tcorr images by WRS2 tile

    Parameters
    ----------
    ini_path : str
        Input file path.
    overwrite_flag : bool, optional
        If True, overwrite existing files (the default is False).
    delay_time : float, optional
        Delay time in seconds between starting export tasks (or checking the
        number of queued tasks, see "max_ready" parameter).  The default is 0.
    gee_key_file : str, None, optional
        Earth Engine service account JSON key file (the default is None).
    max_ready : int, optional
        Maximum number of queued "READY" tasks.  The default is -1, which
        implies no limit on the number of tasks that will be submitted.
    reverse_flag : bool, optional
        If True, process WRS2 tiles in reverse order.

    """
    logging.info('\nCompute default Tcorr images by WRS2 tile')

    ini = utils.read_ini(ini_path)

    model_name = 'SSEBOP'
    # model_name = ini['INPUTS']['et_model'].upper()

    tmax_name = ini[model_name]['tmax_source']

    export_id_fmt = 'tcorr_scene_{product}_{wrs2}_default'
    asset_id_fmt = '{coll_id}/{wrs2}'

    tcorr_default_coll_id = '{}/{}_default'.format(
        ini['EXPORT']['export_coll'], tmax_name.lower())

    wrs2_coll_id = 'projects/earthengine-legacy/assets/' \
                   'projects/usgs-ssebop/wrs2_descending_custom'
    wrs2_tile_field = 'WRS2_TILE'
    # wrs2_path_field = 'PATH'
    # wrs2_row_field = 'ROW'

    try:
        wrs2_tiles = str(ini['INPUTS']['wrs2_tiles'])
        wrs2_tiles = [x.strip() for x in wrs2_tiles.split(',')]
        wrs2_tiles = sorted([x.lower() for x in wrs2_tiles if x])
    except KeyError:
        wrs2_tiles = []
        logging.debug('  wrs2_tiles: not set in INI, defaulting to []')
    except Exception as e:
        raise e

    try:
        study_area_extent = str(ini['INPUTS']['study_area_extent']) \
            .replace('[', '').replace(']', '').split(',')
        study_area_extent = [float(x.strip()) for x in study_area_extent]
    except KeyError:
        study_area_extent = None
        logging.debug('  study_area_extent: not set in INI')
    except Exception as e:
        raise e
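    # For example, an INI entry such as
    #   study_area_extent = [-125, 25, -65, 50]
    # arrives as the string '[-125, 25, -65, 50]'; stripping the brackets
    # and splitting on commas yields [-125.0, 25.0, -65.0, 50.0]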

    try:
        tcorr_default = float(ini[model_name]['tcorr_default'])
    except KeyError:
        tcorr_default = 0.978
        logging.debug('  tcorr_default: not set in INI, defaulting to 0.978')


    logging.info('\nInitializing Earth Engine')
    if gee_key_file:
        logging.info('  Using service account key file: {}'.format(gee_key_file))
        # The "EE_ACCOUNT" parameter is not used if the key file is valid
        ee.Initialize(ee.ServiceAccountCredentials('x', key_file=gee_key_file))
    else:
        ee.Initialize()


    logging.debug('\nTmax properties')
    tmax_source = tmax_name.split('_', 1)[0]
    tmax_version = tmax_name.split('_', 1)[1]
    tmax_coll_id = 'projects/earthengine-legacy/assets/' \
                   'projects/usgs-ssebop/tmax/{}'.format(tmax_name.lower())
    tmax_coll = ee.ImageCollection(tmax_coll_id)
    tmax_mask = ee.Image(tmax_coll.first()).select([0]).multiply(0)
    logging.debug('  Collection: {}'.format(tmax_coll_id))
    logging.debug('  Source: {}'.format(tmax_source))
    logging.debug('  Version: {}'.format(tmax_version))


    # # Get the Tcorr scene image collection properties
    # logging.debug('\nTcorr scene collection')
    # tcorr_scene_coll_id = '{}/{}_scene'.format(
    #     ini['EXPORT']['export_coll'], tmax_name.lower())


    logging.debug('\nExport properties')
    export_info = utils.get_info(ee.Image(tmax_mask))
    if 'daymet' in tmax_name.lower():
        # Custom smaller extent for DAYMET focused on CONUS
        export_extent = [-1999750, -1890500, 2500250, 1109500]
        export_shape = [4500, 3000]
        export_geo = [1000, 0, -1999750, 0, -1000, 1109500]
        # Custom medium extent for DAYMET of CONUS, Mexico, and southern Canada
        # export_extent = [-2099750, -3090500, 2900250, 1909500]
        # export_shape = [5000, 5000]
        # export_geo = [1000, 0, -2099750, 0, -1000, 1909500]
        export_crs = export_info['bands'][0]['crs']
    else:
        export_crs = export_info['bands'][0]['crs']
        export_geo = export_info['bands'][0]['crs_transform']
        export_shape = export_info['bands'][0]['dimensions']
        # export_geo = ee.Image(tmax_mask).projection().getInfo()['transform']
        # export_crs = ee.Image(tmax_mask).projection().getInfo()['crs']
        # export_shape = ee.Image(tmax_mask).getInfo()['bands'][0]['dimensions']
        export_extent = [
            export_geo[2], export_geo[5] + export_shape[1] * export_geo[4],
            export_geo[2] + export_shape[0] * export_geo[0], export_geo[5]]
    export_geom = ee.Geometry.Rectangle(
        export_extent, proj=export_crs, geodesic=False)
    logging.debug('  CRS: {}'.format(export_crs))
    logging.debug('  Extent: {}'.format(export_extent))
    logging.debug('  Geo: {}'.format(export_geo))
    logging.debug('  Shape: {}'.format(export_shape))


    if study_area_extent is None:
        if 'daymet' in tmax_name.lower():
            # CGM - For now force DAYMET to a slightly smaller "CONUS" extent
            study_area_extent = [-125, 25, -65, 50]
            # study_area_extent = [-125, 25, -65, 49]
            # study_area_extent =  [-125, 25, -65, 52]
        elif 'cimis' in tmax_name.lower():
            study_area_extent = [-124, 35, -119, 42]
        else:
            # TODO: Make sure output from bounds is in WGS84
            study_area_extent = tmax_mask.geometry().bounds().getInfo()
        logging.debug(f'\nStudy area extent not set in INI, '
                      f'defaulting to {study_area_extent}')
    study_area_geom = ee.Geometry.Rectangle(
        study_area_extent, proj='EPSG:4326', geodesic=False)


    if not ee.data.getInfo(tcorr_default_coll_id):
        logging.info('\nExport collection does not exist and will be built'
                     '\n  {}'.format(tcorr_default_coll_id))
        input('Press ENTER to continue')
        ee.data.createAsset({'type': 'IMAGE_COLLECTION'}, tcorr_default_coll_id)

    # Get current asset list
    logging.debug('\nGetting GEE asset list')
    asset_list = utils.get_ee_assets(tcorr_default_coll_id)
    # if logging.getLogger().getEffectiveLevel() == logging.DEBUG:
    #     pprint.pprint(asset_list[:10])

    # Get current running tasks
    tasks = utils.get_ee_tasks()
    if logging.getLogger().getEffectiveLevel() == logging.DEBUG:
        logging.debug('  Tasks: {}\n'.format(len(tasks)))
        input('ENTER')


    # Get the list of WRS2 tiles that intersect the data area and study area
    wrs2_coll = ee.FeatureCollection(wrs2_coll_id)\
        .filterBounds(export_geom)\
        .filterBounds(study_area_geom)
    if wrs2_tiles:
        wrs2_coll = wrs2_coll.filter(ee.Filter.inList(wrs2_tile_field, wrs2_tiles))
    wrs2_info = wrs2_coll.getInfo()['features']


    for wrs2_ftr in sorted(wrs2_info,
                           key=lambda k: k['properties']['WRS2_TILE'],
                           reverse=reverse_flag):
        wrs2_tile = wrs2_ftr['properties'][wrs2_tile_field]
        logging.info('{}'.format(wrs2_tile))

        wrs2_path = int(wrs2_tile[1:4])
        wrs2_row = int(wrs2_tile[5:8])
        # wrs2_path = wrs2_ftr['properties'][wrs2_path_field]
        # wrs2_row = wrs2_ftr['properties'][wrs2_row_field]
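        # The slicing above assumes tile IDs of the form 'pPPPrRRR'
        # (e.g. 'p042r034' -> path 42, row 34); if the WRS2_TILE field
        # used a different layout, the PATH/ROW properties in the
        # commented lines would be the safer source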

        export_id = export_id_fmt.format(
            product=tmax_name.lower(), wrs2=wrs2_tile)
        logging.debug('  Export ID: {}'.format(export_id))

        asset_id = asset_id_fmt.format(
            coll_id=tcorr_default_coll_id, wrs2=wrs2_tile)
        logging.debug('  Asset ID: {}'.format(asset_id))

        if overwrite_flag:
            if export_id in tasks.keys():
                logging.debug('  Task already submitted, cancelling')
                ee.data.cancelTask(tasks[export_id]['id'])
            # This is intentionally not an "elif" so that a task can be
            # cancelled and an existing image/file/asset can be removed
            if asset_id in asset_list:
                logging.debug('  Asset already exists, removing')
                ee.data.deleteAsset(asset_id)
        else:
            if export_id in tasks.keys():
                logging.debug('  Task already submitted, exiting')
                continue
            elif asset_id in asset_list:
                logging.debug('  Asset already exists, skipping')
                continue

        # Clip the mask image to the Landsat footprint
        mask_img = tmax_mask.add(1).clip(ee.Geometry(wrs2_ftr['geometry']))

        # Apply the default Tcorr value and then clear the transparency mask
        output_img = mask_img.multiply(tcorr_default).rename(['tcorr'])\
            .updateMask(mask_img.unmask(0))
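        # After the clip, pixels outside the footprint are transparent;
        # unmask(0) turns them into explicit zeros and updateMask then
        # re-masks exactly those zeros, leaving a hard 0/1 footprint
        # instead of whatever mask the clip produced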

        # # Clip to the Landsat image footprint
        # output_img = tmax_mask.add(tcorr_default) \
        #     .rename(['tcorr']) \
        #     .clip(ee.Geometry(wrs2_ftr['geometry']))
        # # Clear the transparency mask
        # output_img = output_img.updateMask(output_img.unmask(0))

        output_img = output_img.set({
            'date_ingested': datetime.datetime.today().strftime('%Y-%m-%d'),
            'model_name': model_name,
            'model_version': ssebop.__version__,
            # 'system:time_start': utils.millis(start_dt),
            'tcorr_value': tcorr_default,
            'tcorr_index': DEFAULT_TCORR_INDEX,
            'tmax_source': tmax_source.upper(),
            'tmax_version': tmax_version.upper(),
            'wrs2_path': wrs2_path,
            'wrs2_row': wrs2_row,
            'wrs2_tile': wrs2_tile,
        })
        # pprint.pprint(output_img.getInfo())
        # input('ENTER')

        logging.debug('  Building export task')
        task = ee.batch.Export.image.toAsset(
            image=output_img,
            description=export_id,
            assetId=asset_id,
            crs=export_crs,
            crsTransform='[' + ','.join(list(map(str, export_geo))) + ']',
            dimensions='{0}x{1}'.format(*export_shape),
        )

        logging.info('  Starting export task')
        utils.ee_task_start(task)

        # Pause before starting the next export task
        utils.delay_task(delay_time, max_ready)
        logging.debug('')
Example #15
0
def main(ini_path=None,
         overwrite_flag=False,
         delay=0,
         key=None,
         cron_flag=False,
         reverse_flag=False):
    """Compute daily Tcorr images

    Parameters
    ----------
    ini_path : str
        Input file path.
    overwrite_flag : bool, optional
        If True, overwrite existing files if the export dates are the same and
        generate new images (but with different export dates) even if the tile
        lists are the same.  The default is False.
    delay : float, optional
        Delay time between each export task (the default is 0).
    key : str, optional
        File path to an Earth Engine json key file (the default is None).
    cron_flag : bool, optional
        If True, only compute the daily Tcorr image if the existing image
        does not include all available images (checked using the
        'wrs2_tiles' property), and limit the date range to the last
        64 days (~2 months).
    reverse_flag : bool, optional
        If True, process dates in reverse order.
    """
    logging.info('\nCompute daily Tcorr images')

    ini = utils.read_ini(ini_path)

    model_name = 'SSEBOP'
    # model_name = ini['INPUTS']['et_model'].upper()

    if (ini[model_name]['tmax_source'].upper() == 'CIMIS'
            and ini['INPUTS']['end_date'] < '2003-10-01'):
        logging.error(
            '\nCIMIS is not currently available before 2003-10-01, exiting\n')
        sys.exit()
    elif (ini[model_name]['tmax_source'].upper() == 'DAYMET'
          and ini['INPUTS']['end_date'] > '2017-12-31'):
        logging.warning('\nDAYMET is not currently available past 2017-12-31, '
                        'using median Tmax values\n')
        # sys.exit()
    # elif (ini[model_name]['tmax_source'].upper() == 'TOPOWX' and
    #         ini['INPUTS']['end_date'] > '2017-12-31'):
    #     logging.warning(
    #         '\nTOPOWX is not currently available past 2017-12-31, '
    #         'using median Tmax values\n')
    #     # sys.exit()

    logging.info('\nInitializing Earth Engine')
    if key:
        logging.info('  Using service account key file: {}'.format(key))
        # The "EE_ACCOUNT" parameter is not used if the key file is valid
        ee.Initialize(ee.ServiceAccountCredentials('deadbeef', key_file=key))
    else:
        ee.Initialize()

    # Output Tcorr daily image collection
    tcorr_daily_coll_id = '{}/{}_daily'.format(
        ini['EXPORT']['export_coll'], ini[model_name]['tmax_source'].lower())

    # Get a Tmax image to set the Tcorr values to
    logging.debug('\nTmax properties')
    tmax_name = ini[model_name]['tmax_source']
    tmax_source = tmax_name.split('_', 1)[0]
    tmax_version = tmax_name.split('_', 1)[1]
    tmax_coll_id = 'projects/usgs-ssebop/tmax/{}'.format(tmax_name.lower())
    tmax_coll = ee.ImageCollection(tmax_coll_id)
    tmax_mask = ee.Image(tmax_coll.first()).select([0]).multiply(0)
    logging.debug('  Collection: {}'.format(tmax_coll_id))
    logging.debug('  Source: {}'.format(tmax_source))
    logging.debug('  Version: {}'.format(tmax_version))

    logging.debug('\nExport properties')
    export_geo = ee.Image(tmax_mask).projection().getInfo()['transform']
    export_crs = ee.Image(tmax_mask).projection().getInfo()['crs']
    export_shape = ee.Image(tmax_mask).getInfo()['bands'][0]['dimensions']
    export_extent = [
        export_geo[2], export_geo[5] + export_shape[1] * export_geo[4],
        export_geo[2] + export_shape[0] * export_geo[0], export_geo[5]
    ]
    logging.debug('  CRS: {}'.format(export_crs))
    logging.debug('  Extent: {}'.format(export_extent))
    logging.debug('  Geo: {}'.format(export_geo))
    logging.debug('  Shape: {}'.format(export_shape))

    # # Limit export to a user defined study area or geometry?
    # export_geom = ee.Geometry.Rectangle(
    #     [-125, 24, -65, 50], proj='EPSG:4326', geodesic=False)  # CONUS
    # export_geom = ee.Geometry.Rectangle(
    #     [-124, 35, -119, 42], proj='EPSG:4326', geodesic=False)  # California

    # If cell_size parameter is set in the INI,
    # adjust the output cellsize and recompute the transform and shape
    try:
        export_cs = float(ini['EXPORT']['cell_size'])
        export_shape = [
            int(math.ceil(abs((export_shape[0] * export_geo[0]) / export_cs))),
            int(math.ceil(abs((export_shape[1] * export_geo[4]) / export_cs)))
        ]
        export_geo = [
            export_cs, 0.0, export_geo[2], 0.0, -export_cs, export_geo[5]
        ]
        logging.debug('  Custom export cell size: {}'.format(export_cs))
        logging.debug('  Geo: {}'.format(export_geo))
        logging.debug('  Shape: {}'.format(export_shape))
    except KeyError:
        pass
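    # Worked example with illustrative values: a 1000 m grid with
    # shape [4500, 3000] and geo [1000, 0, -1999750, 0, -1000, 1109500]
    # resampled to cell_size = 500 becomes
    #   shape = [ceil(4500000 / 500), ceil(3000000 / 500)] = [9000, 6000]
    #   geo   = [500, 0.0, -1999750, 0.0, -500, 1109500]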

    # Get current asset list
    if ini['EXPORT']['export_dest'].upper() == 'ASSET':
        logging.debug('\nGetting asset list')
        # DEADBEEF - daily is hardcoded in the asset_id for now
        asset_list = utils.get_ee_assets(tcorr_daily_coll_id)
    else:
        raise ValueError('invalid export destination: {}'.format(
            ini['EXPORT']['export_dest']))

    # Get current running tasks
    tasks = utils.get_ee_tasks()
    if logging.getLogger().getEffectiveLevel() == logging.DEBUG:
        logging.debug('  Tasks: {}\n'.format(len(tasks)))
        input('ENTER')

    collections = [x.strip() for x in ini['INPUTS']['collections'].split(',')]

    # Limit by year and month
    try:
        month_list = sorted(list(utils.parse_int_set(ini['TCORR']['months'])))
    except KeyError:
        logging.info('\nTCORR "months" parameter not set in the INI,'
                     '\n  Defaulting to all months (1-12)\n')
        month_list = list(range(1, 13))
    try:
        year_list = sorted(list(utils.parse_int_set(ini['TCORR']['years'])))
    except KeyError:
        logging.info('\nTCORR "years" parameter not set in the INI,'
                     '\n  Defaulting to all available years\n')
        year_list = []

    # Key is cycle day, value is a reference date on that cycle
    # Data from: https://landsat.usgs.gov/landsat_acq
    # I only need 8 cycle days because the Landsat 5/7 and 7/8 pairs are
    # offset from each other by 8 days
    cycle_dates = {
        7: '1970-01-01',
        8: '1970-01-02',
        1: '1970-01-03',
        2: '1970-01-04',
        3: '1970-01-05',
        4: '1970-01-06',
        5: '1970-01-07',
        6: '1970-01-08',
    }
    # cycle_dates = {
    #     1:  '2000-01-06',
    #     2:  '2000-01-07',
    #     3:  '2000-01-08',
    #     4:  '2000-01-09',
    #     5:  '2000-01-10',
    #     6:  '2000-01-11',
    #     7:  '2000-01-12',
    #     8:  '2000-01-13',
    #     # 9:  '2000-01-14',
    #     # 10: '2000-01-15',
    #     # 11: '2000-01-16',
    #     # 12: '2000-01-01',
    #     # 13: '2000-01-02',
    #     # 14: '2000-01-03',
    #     # 15: '2000-01-04',
    #     # 16: '2000-01-05',
    # }
    cycle_base_dt = datetime.datetime.strptime(cycle_dates[1], '%Y-%m-%d')
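    # Quick sanity check of the cycle-day math used below
    # (((export_dt - cycle_base_dt).days % 8) + 1): for 1970-01-10 the
    # difference is 7 days, and 7 % 8 + 1 = 8, which agrees with the
    # table above since 1970-01-02 (cycle day 8) is exactly 8 days earlier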

    if cron_flag:
        # Round "today" down to midnight, then use a 64 day (~2 month)
        # window that ends 4 days before today
        iter_end_dt = datetime.datetime.combine(
            datetime.date.today(), datetime.time.min)
        iter_end_dt = iter_end_dt + datetime.timedelta(days=-4)
        iter_start_dt = iter_end_dt + datetime.timedelta(days=-64)
    else:
        iter_start_dt = datetime.datetime.strptime(ini['INPUTS']['start_date'],
                                                   '%Y-%m-%d')
        iter_end_dt = datetime.datetime.strptime(ini['INPUTS']['end_date'],
                                                 '%Y-%m-%d')
    logging.debug('Start Date: {}'.format(iter_start_dt.strftime('%Y-%m-%d')))
    logging.debug('End Date:   {}\n'.format(iter_end_dt.strftime('%Y-%m-%d')))

    for export_dt in sorted(utils.date_range(iter_start_dt, iter_end_dt),
                            reverse=reverse_flag):
        export_date = export_dt.strftime('%Y-%m-%d')
        next_date = (export_dt +
                     datetime.timedelta(days=1)).strftime('%Y-%m-%d')
        # if ((month_list and export_dt.month not in month_list) or
        #         (year_list and export_dt.year not in year_list)):
        if month_list and export_dt.month not in month_list:
            logging.debug(f'Date: {export_date} - month not in INI - skipping')
            continue
        elif export_date >= datetime.datetime.today().strftime('%Y-%m-%d'):
            logging.debug(f'Date: {export_date} - unsupported date - skipping')
            continue
        elif export_date < '1984-03-23':
            logging.debug(f'Date: {export_date} - no Landsat 5+ images before '
                          '1984-03-23 - skipping')
            continue
        logging.info(f'Date: {export_date}')

        export_id = ini['EXPORT']['export_id_fmt'] \
            .format(
                product=tmax_name.lower(),
                date=export_dt.strftime('%Y%m%d'),
                export=datetime.datetime.today().strftime('%Y%m%d'),
                dest=ini['EXPORT']['export_dest'].lower())
        logging.debug('  Export ID: {}'.format(export_id))

        if ini['EXPORT']['export_dest'] == 'ASSET':
            asset_id = '{}/{}_{}'.format(
                tcorr_daily_coll_id, export_dt.strftime('%Y%m%d'),
                datetime.datetime.today().strftime('%Y%m%d'))
            logging.debug('  Asset ID: {}'.format(asset_id))

        if overwrite_flag:
            if export_id in tasks.keys():
                logging.debug('  Task already submitted, cancelling')
                ee.data.cancelTask(tasks[export_id])
            # This is intentionally not an "elif" so that a task can be
            # cancelled and an existing image/file/asset can be removed
            if (ini['EXPORT']['export_dest'].upper() == 'ASSET'
                    and asset_id in asset_list):
                logging.debug('  Asset already exists, removing')
                ee.data.deleteAsset(asset_id)
        else:
            if export_id in tasks.keys():
                logging.debug('  Task already submitted, exiting')
                continue
            elif (ini['EXPORT']['export_dest'].upper() == 'ASSET'
                  and asset_id in asset_list):
                logging.debug('  Asset already exists, skipping')
                continue

        # Build and merge the Landsat collections
        model_obj = ssebop.Collection(
            collections=collections,
            start_date=export_dt.strftime('%Y-%m-%d'),
            end_date=(export_dt +
                      datetime.timedelta(days=1)).strftime('%Y-%m-%d'),
            cloud_cover_max=float(ini['INPUTS']['cloud_cover']),
            geometry=tmax_mask.geometry(),
            # model_args=model_args,
            # filter_args=filter_args,
        )
        landsat_coll = model_obj.overpass(variables=['ndvi'])
        # wrs2_tiles_all = model_obj.get_image_ids()
        # pprint.pprint(landsat_coll.aggregate_array('system:id').getInfo())
        # input('ENTER')

        logging.debug('  Getting available WRS2 tile list')
        landsat_id_list = landsat_coll.aggregate_array('system:id').getInfo()
        wrs2_tiles_all = set(
            image_id.split('_')[-2] for image_id in landsat_id_list)
        if not wrs2_tiles_all:
            logging.info('  No available images - skipping')
            continue

        # If overwriting, start a new export no matter what
        # The default is to no overwrite, so this mode will not be used often
        if not overwrite_flag:
            # Check if there are any previous images for this date
            # If so, only build a new Tcorr image if there are new wrs2_tiles
            #   that were not used in the previous image.
            # Should this code only be run in cron mode or is this the expected
            #   operation when (re)running for any date range?
            # Should we only test the last image
            # or all previous images for the date?
            logging.debug(
                '  Checking for previous exports/versions of daily image')
            tcorr_daily_coll = ee.ImageCollection(tcorr_daily_coll_id)\
                .filterDate(export_date, next_date)\
                .limit(1, 'date_ingested', False)
            tcorr_daily_info = tcorr_daily_coll.getInfo()

            if tcorr_daily_info['features']:
                # Assume we won't be building a new image and only set flag
                #   to True if the WRS2 tile lists are different
                export_flag = False

                # The ".limit(1, ..." on the tcorr_daily_coll above makes this
                # for loop and break statement unnecessary, but leaving for now
                for tcorr_img in tcorr_daily_info['features']:
                    # If the full WRS2 list is not present, rebuild the image
                    # This should only happen for much older Tcorr images
                    if 'wrs2_available' not in tcorr_img['properties'].keys():
                        logging.debug(
                            '    "wrs2_available" property not present in '
                            'previous export')
                        export_flag = True
                        break

                    wrs2_tiles_old = set(
                        tcorr_img['properties']['wrs2_available'].split(','))

                    if wrs2_tiles_all != wrs2_tiles_old:
                        logging.debug('  Tile Lists')
                        logging.debug('  Previous: {}'.format(', '.join(
                            sorted(wrs2_tiles_old))))
                        logging.debug('  Available: {}'.format(', '.join(
                            sorted(wrs2_tiles_all))))
                        logging.debug('  New: {}'.format(', '.join(
                            sorted(
                                wrs2_tiles_all.difference(wrs2_tiles_old)))))
                        logging.debug('  Dropped: {}'.format(', '.join(
                            sorted(
                                wrs2_tiles_old.difference(wrs2_tiles_all)))))

                        export_flag = True
                        break

                if not export_flag:
                    logging.debug('  No new WRS2 tiles/images - skipping')
                    continue
                # else:
                #     logging.debug('    Building new version')
            else:
                logging.debug('    No previous exports')

        def tcorr_img_func(image):
            t_stats = ssebop.Image.from_landsat_c1_toa(
                    ee.Image(image),
                    tdiff_threshold=float(ini[model_name]['tdiff_threshold'])) \
                .tcorr_stats
            t_stats = ee.Dictionary(t_stats) \
                .combine({'tcorr_p5': 0, 'tcorr_count': 0},
                         overwrite=False)
            tcorr = ee.Number(t_stats.get('tcorr_p5'))
            count = ee.Number(t_stats.get('tcorr_count'))

            # Remove the merged collection indices from the system:index
            scene_id = ee.List(
                ee.String(image.get('system:index')).split('_')).slice(-3)
            scene_id = ee.String(scene_id.get(0)).cat('_') \
                .cat(ee.String(scene_id.get(1))).cat('_') \
                .cat(ee.String(scene_id.get(2)))
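            # e.g. a merged index like '1_2_LC08_043033_20170716' reduces
            # to 'LC08_043033_20170716' (the '1_2_' prefix stands in for
            # whatever indices the collection merge prepends)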

            return tmax_mask.add(tcorr) \
                .rename(['tcorr']) \
                .clip(image.geometry()) \
                .set({
                    'system:time_start': image.get('system:time_start'),
                    'scene_id': scene_id,
                    'wrs2_tile': scene_id.slice(5, 11),
                    'spacecraft_id': image.get('SPACECRAFT_ID'),
                    'tcorr': tcorr,
                    'count': count,
                })

        # Test for one image
        # pprint.pprint(tcorr_img_func(ee.Image(landsat_coll \
        #     .filterMetadata('WRS_PATH', 'equals', 36) \
        #     .filterMetadata('WRS_ROW', 'equals', 33).first())).getInfo())
        # input('ENTER')

        # (Re)build the Landsat collection from the image IDs
        landsat_coll = ee.ImageCollection(landsat_id_list)
        tcorr_img_coll = ee.ImageCollection(landsat_coll.map(tcorr_img_func)) \
            .filterMetadata('count', 'not_less_than',
                            float(ini['TCORR']['min_pixel_count']))

        # If there are no Tcorr values, return an empty image
        tcorr_img = ee.Algorithms.If(tcorr_img_coll.size().gt(0),
                                     tcorr_img_coll.median(),
                                     tmax_mask.updateMask(0))
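        # ee.Algorithms.If is evaluated server side, so the median is only
        # computed when the collection is non-empty; otherwise a fully
        # masked copy of the Tmax grid stands in as an empty Tcorr image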

        def unique_properties(coll, property_name):
            return ee.String(
                ee.List(
                    ee.Dictionary(
                        coll.aggregate_histogram(property_name)).keys()).join(','))
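        # aggregate_histogram returns a dictionary keyed by the distinct
        # values of the property, so joining its keys gives a comma
        # separated list of unique values, e.g. '043033,043034' for
        # wrs2_tile (the tile values here are illustrative)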

        wrs2_tile_list = ee.String('').cat(
            unique_properties(tcorr_img_coll, 'wrs2_tile'))
        landsat_list = ee.String('').cat(
            unique_properties(tcorr_img_coll, 'spacecraft_id'))

        # Cast to float and set properties
        tcorr_img = ee.Image(tcorr_img).rename(['tcorr']).double() \
            .set({
                'system:time_start': utils.millis(export_dt),
                'date_ingested': datetime.datetime.today().strftime('%Y-%m-%d'),
                'date': export_dt.strftime('%Y-%m-%d'),
                'year': int(export_dt.year),
                'month': int(export_dt.month),
                'day': int(export_dt.day),
                'doy': int(export_dt.strftime('%j')),
                'cycle_day': ((export_dt - cycle_base_dt).days % 8) + 1,
                'landsat': landsat_list,
                'model_name': model_name,
                'model_version': ssebop.__version__,
                'tmax_source': tmax_source.upper(),
                'tmax_version': tmax_version.upper(),
                'wrs2_tiles': wrs2_tile_list,
                'wrs2_available': ','.join(sorted(wrs2_tiles_all)),
            })

        # Build export tasks
        if ini['EXPORT']['export_dest'] == 'ASSET':
            logging.debug('  Building export task')
            task = ee.batch.Export.image.toAsset(
                image=ee.Image(tcorr_img),
                description=export_id,
                assetId=asset_id,
                crs=export_crs,
                crsTransform='[' + ','.join(list(map(str, export_geo))) + ']',
                dimensions='{0}x{1}'.format(*export_shape),
            )
            logging.info('  Starting export task')
            utils.ee_task_start(task)

        # Pause before starting next task
        utils.delay_task(delay)
        logging.debug('')