Пример #1
0
    def get(self):
        """Refresh the per-product max-date cache and report it.

        Fetches max dates from the datastore (one retry after 30s on
        failure), conditionally writes entries to memcache, and writes an
        HTML summary of all max dates to the response.
        """
        ee.Initialize(config.EE_CREDENTIALS)
        ee.data.setDeadline(180000)  # 3 minutes

        import loggerFunctions
        logger = loggerFunctions.set_logger('MAX DATES')
        try:
            maxDates = collection_dataStore.get_all_maxDates()
        except:
            # First attempt failed; pause and retry once (second failure propagates)
            time.sleep(30)
            maxDates = collection_dataStore.get_all_maxDates()

        logger.info(maxDates)

        for key, value in maxDates.iteritems():
            #Check that date is of format yyyy-mm-dd
            err = 0
            try:
                dt.datetime.strptime(value, "%Y-%m-%d")
            except:
                err = 1
            # NOTE(review): the entry is cached only when the value does NOT
            # parse as yyyy-mm-dd -- this looks inverted; confirm intent
            if err:
                # 3600s = 1 hr..in seconds (not milliseconds)
                #memcache.add(key=key, value=value, time=3600)
                memcache.add(key=key, value=value, time=0)  #no expiration
                #logger.info('added'+key+value)
            #else #wait until next time

        out_str = '<h4>Max Dates</h4>'
        for key, value in sorted(maxDates.iteritems()):
            out_str += '{} = {}'.format(key, value) + '<br><br>'
        self.response.out.write(out_str)
Пример #2
0
    def get(self):
        """Refresh the per-product max-date cache and write an HTML summary."""
        ee.Initialize(config.EE_CREDENTIALS)
        ee.data.setDeadline(180000)  # 3 minutes

        import loggerFunctions
        logger = loggerFunctions.set_logger('MAX DATES')
        try:
            maxDates = collection_dataStore.get_all_maxDates()
        except:
            # Retry once after a short pause
            time.sleep(30)
            maxDates = collection_dataStore.get_all_maxDates()

        logger.info(maxDates)

        for key, value in maxDates.iteritems():
            # Cache only the values that do not parse as yyyy-mm-dd
            try:
                dt.datetime.strptime(value, "%Y-%m-%d")
            except:
                # time=0 means the memcache entry never expires
                memcache.add(key=key, value=value, time=0)

        pieces = ['<h4>Max Dates</h4>']
        for key, value in sorted(maxDates.iteritems()):
            pieces.append('{} = {}'.format(key, value) + '<br><br>')
        self.response.out.write(''.join(pieces))
Пример #3
0
def get_time_series(template_values, shape_type):
    """
    Time series request for points or fusion tables
    (shape_type p or ft, respectively)
    Args:
        template_values: a dictionary of user and system input
        shape_type: p (points) or ft (fusion tables)
    Returns:
        updated template_values with time series data
    """
    #================================
    logger = loggerFunctions.set_logger('points_debug')
    #Set the time variables (depends on timeSeriesCalc)
    time_vars = set_time_vars_for_processing(template_values, 1)
    #Set collection and update template_values
    productTS = template_values['productTS']
    collection = set_collection(template_values, productTS,
                                template_values['variableTS'],
                                template_values['modelTS'],
                                template_values['scenarioTS'], time_vars, 1,
                                logger)
    #Climate-projection products carry all scenarios in one collection;
    #restrict to the requested one
    if productTS in ('MACA', 'NASANEX'):
        scenarioTS = template_values['scenarioTS']
        collection = collection.filterMetadata("scenario", "equals",
                                               scenarioTS)
    collection2 = None
    if template_values['variable2display'] != 'none':
        #A second variable was requested: build its collection too
        time_vars2 = set_time_vars_for_processing(template_values, 2)
        product2TS = template_values['product2TS']
        scenario2TS = template_values['scenario2TS']
        collection2 = set_collection(template_values, product2TS,
                                     template_values['variable2TS'],
                                     template_values['model2TS'],
                                     template_values['scenario2TS'],
                                     time_vars2, 2, logger)
        if product2TS in ('MACA', 'NASANEX'):
            collection2 = collection2.filterMetadata("scenario", "equals",
                                                     scenario2TS)
    #================================
    #Run threads and update template variables
    extra_template_values, timeSeriesTextData, timeSeriesGraphData = run_threads(
        collection, template_values, time_vars, 1, shape_type, logger)
    extra_template_values['timeSeriesTextData'] = timeSeriesTextData
    extra_template_values['timeSeriesGraphData'] = json.dumps(
        timeSeriesGraphData)
    template_values.update(extra_template_values)
    if collection2:
        #Repeat for the second variable, storing under the *2 keys
        extra_template_values, timeSeriesTextData, timeSeriesGraphData = run_threads(
            collection2, template_values, time_vars2, 2, shape_type, logger)
        extra_template_values['timeSeriesTextData2'] = timeSeriesTextData
        extra_template_values['timeSeriesGraphData2'] = json.dumps(
            timeSeriesGraphData)
        template_values.update(extra_template_values)
    #================================
    return template_values
Пример #4
0
def processPointData(template_values, extra_template_values, time_vars, point_info, logger,timeSeriesTextData,timeSeriesGraphData,varnum):
    """Format one point's worker-thread results into text and graph dicts.

    Dispatches on template_values['timeSeriesCalc'] ('days', 'interannual',
    'intraannual') to process point_info['data'], appends the formatted
    dicts to timeSeriesTextData/timeSeriesGraphData, and stores any
    climatology/percentile results in extra_template_values (keyed by varnum).
    Returns (extra_template_values, timeSeriesTextData, timeSeriesGraphData).
    """
    # NOTE(review): the caller-supplied logger is discarded and replaced here
    logger = loggerFunctions.set_logger('info')

    name = point_info['name']
    altname = point_info['altname']
    marker_color = point_info['marker_color']

    #Initialize the data dicts
    data_dict_ts = formatData.initialize_timeSeriesTextDataDict(name, altname=altname)
    data_dict_graph = formatData.initialize_timeSeriesGraphDataDict(name,marker_color,altname=altname)

    #logger.info('**********timeSeriesCalc************'+template_values['timeSeriesCalc'])
    climoData=[]
    percentileData = []
    #process data according to what timeSereisCalc is
    if template_values['timeSeriesCalc'] == 'days':
        data_dict_ts['Data'],data_dict_graph['Data'] = process_daily_threadData(
            point_info['data'],template_values,varnum)
    elif template_values['timeSeriesCalc'] == 'interannual':
        sS_doy = time_vars['seasonStart_doy']
        sE_doy = time_vars['seasonEnd_doy']
        data_dict_ts['Data'],data_dict_graph['Data'] = process_interannual_threadData(
            point_info['data'], template_values, sS_doy, sE_doy, varnum,logger)
    elif template_values['timeSeriesCalc'] == 'intraannual':
        #logger.info('**********inside intrannaul************')
        #get data for all years stored in dict with keys year
        doyS = time_vars['doyStart']
        doyE = time_vars['doyEnd']
        yS = time_vars['yearStart']
        yE = time_vars['yearEnd']
        year_dict_ts,year_dict_graph, climoData, percentileData =process_intraannual_threadData(
            point_info['data'], template_values,doyS, doyE, yS, yE, varnum,logger)
        # NOTE(review): the membership guard below was disabled; a year
        # missing from year_dict_ts/year_dict_graph raises KeyError -- confirm
        # the worker always populates every year in [yS, yE]
        for year in range(int(yS),int(yE) +1):
            '''
            if year in year_dict_ts.keys() and year_dict_ts[year]:
                data_dict_ts['Data'].append(year_dict_ts[year])
            if year in year_dict_graph.keys() and year_dict_graph[year]:
                data_dict_graph['Data'].append(year_dict_graph[year])
            '''
            data_dict_ts['Data'].append(year_dict_ts[year])
            data_dict_graph['Data'].append(year_dict_graph[year])
    timeSeriesTextData.append(data_dict_ts)
    timeSeriesGraphData.append(data_dict_graph)

    # Climatology/percentile extras are stored under varnum-specific keys
    if climoData:
        if varnum == 1:
            extra_template_values['climoData'] = json.dumps(climoData)
        if varnum == 2:
            extra_template_values['climoData2'] = json.dumps(climoData)
    if percentileData:
        if varnum == 1:
            extra_template_values['percentileData'] = json.dumps(percentileData)
        if varnum == 2:
            extra_template_values['percentileData2'] = json.dumps(percentileData)
    return extra_template_values,timeSeriesTextData,timeSeriesGraphData
Пример #5
0
def get_time_series(template_values, shape_type):
    """
    Time series request for points or fusion tables
    (shape_type p or ft, respectively)
    Args:
        template_values: a dictionary of user and system input
        shape_type: p (points) or ft (fusion tables)
    Returns:
        updated template_values with time series data
    """
    logger = loggerFunctions.set_logger('points_debug')
    # Placeholder for a threading error
    error = None

    # Time variables for the primary variable (depend on timeSeriesCalc)
    time_vars = set_time_vars_for_processing(template_values, 1)
    prod1 = template_values['productTS']
    collection = set_collection(
        template_values, prod1, template_values['variableTS'],
        template_values['modelTS'], template_values['scenarioTS'],
        time_vars, 1, logger)
    if prod1 in ('MACA', 'NASANEX'):
        scen1 = template_values['scenarioTS']
        collection = collection.filterMetadata("scenario", "equals", scen1)

    # Optional second variable
    collection2 = None
    if template_values['variable2display'] != 'none':
        time_vars2 = set_time_vars_for_processing(template_values, 2)
        prod2 = template_values['product2TS']
        scen2 = template_values['scenario2TS']
        collection2 = set_collection(
            template_values, prod2, template_values['variable2TS'],
            template_values['model2TS'], template_values['scenario2TS'],
            time_vars2, 2, logger)
        if prod2 in ('MACA', 'NASANEX'):
            collection2 = collection2.filterMetadata("scenario", "equals", scen2)

    # Primary variable: run the worker threads and store the results
    extras, textData, graphData = run_threads(
        collection, template_values, time_vars, 1, shape_type, logger)
    extras['timeSeriesTextData'] = textData
    extras['timeSeriesGraphData'] = json.dumps(graphData)
    template_values.update(extras)

    # Second variable, if requested, goes under the *2 keys
    if collection2:
        extras, textData, graphData = run_threads(
            collection2, template_values, time_vars2, 2, shape_type, logger)
        extras['timeSeriesTextData2'] = textData
        extras['timeSeriesGraphData2'] = json.dumps(graphData)
        template_values.update(extras)

    return template_values
Пример #6
0
def get_all_maxDates():
    """Compute the max available date for each product, in parallel.

    Spawns one worker thread per entry of default_maxDateCalc (defined in
    the STATIC section); each worker writes its result into the shared
    maxDates dict under its own slot.
    Returns:
        dict mapping product keys to max-date strings. Entries may be
        missing for workers that did not finish within the join timeout.
    """
    import loggerFunctions
    logger_temp = loggerFunctions.set_logger('maxDates')
    threads = [None] * len(default_maxDateCalc)
    maxDates = {}
    # Start a thread for each product
    for t_idx, prod_args in enumerate(default_maxDateCalc):
        # Lazy %-args avoid building the string when the level is disabled
        logger_temp.info('Running thread: %s', t_idx)
        logger_temp.info('Product: %s', prod_args[0])
        # Append the slot index and the shared results dict to the arguments
        thread_args = prod_args[1] + (t_idx, maxDates)
        t = threading.Thread(target=maxDateWorker, args=thread_args)
        threads[t_idx] = t
        t.start()
    # Wait up to 10s per thread; join() does not raise on timeout, so a slow
    # worker silently leaves its entry unset -- callers must tolerate gaps
    for t in threads:
        t.join(10)
    return maxDates
Пример #7
0
def get_all_maxDates():
    """Spawn one worker thread per product and collect their max dates."""
    import loggerFunctions
    log = loggerFunctions.set_logger('maxDates')
    maxDates = {}
    workers = []
    # One thread per entry of default_maxDateCalc (defined in STATIC section)
    for idx, prod_args in enumerate(default_maxDateCalc):
        log.info('Running thread: ' + str(idx))
        log.info('Product: ' + prod_args[0])
        # Hand the slot index and the shared results dict to the worker
        worker_args = prod_args[1] + (idx, maxDates)
        worker = threading.Thread(target=maxDateWorker, args=worker_args)
        workers.append(worker)
        worker.start()
    # Give each worker up to 10 seconds to finish
    for worker in workers:
        worker.join(10)
    return maxDates
Пример #8
0
def runTool(self, applicationName):
    """Tool entry point: initialize Earth Engine, build and validate the
    template values, then dispatch the requested toolAction.
    Args:
        self: request handler (provides .request arguments)
        applicationName: string name of the application/tool
    Returns:
        template_values dict updated with results, or with a 'form_error'
        entry mapping the failing field to its error message.
    """
    ee.Initialize(config.EE_CREDENTIALS)
    ee.data.setDeadline(180000)  # milliseconds (3 minutes)
    import loggerFunctions
    logger = loggerFunctions.set_logger('TEST')
    # Initialize forms
    template_values = templatevariables.set_initial_template_values(
        self, applicationName)
    # Check user input for errors:
    fieldID, input_err = formchecks.check_user_input(self, template_values)
    if input_err is None:
        if self.request.arguments():
            # Update template values with mapid or time series data
            toolAction = template_values['toolAction']
            if toolAction == 'getTimeSeriesOverDateRange':
                subDomainTypeTS = template_values['subDomainTypeTS']
                if subDomainTypeTS == 'customShapes':
                    shape_type = 'ft'
                else:
                    # 'points' -- also the fallback; any other value
                    # previously left shape_type unbound (NameError)
                    shape_type = 'p'
                template_values = timeseriesMethods.get_time_series(
                    template_values, shape_type)
            elif toolAction in ('getMap',
                                'downloadRectangleSubset',
                                'showSingleValueOnMap',
                                'downloadFusionTableSubset'):
                template_values = mappingMethods.get_images(template_values)
    else:
        # Report the failing field back to the form
        template_values['form_error'] = {fieldID: input_err}
    return template_values
Пример #9
0
def runTool(self, applicationName):
    """Dispatch a tool request: set up Earth Engine, build the template
    values from the submitted form, validate them, and run the action."""
    ee.Initialize(config.EE_CREDENTIALS)
    ee.data.setDeadline(180000)
    import loggerFunctions
    logger = loggerFunctions.set_logger('TEST')
    # Build the initial template values from the form
    template_values = templatevariables.set_initial_template_values(
        self, applicationName)
    # Validate user input
    fieldID, input_err = formchecks.check_user_input(self, template_values)
    if input_err is not None:
        # Hand the failing field and message back to the form
        template_values['form_error'] = {fieldID: input_err}
        return template_values
    if not self.request.arguments():
        return template_values
    toolAction = template_values['toolAction']
    if toolAction == 'getTimeSeriesOverDateRange':
        subDomainTypeTS = template_values['subDomainTypeTS']
        if subDomainTypeTS == 'points':
            shape_type = 'p'
        elif subDomainTypeTS == 'customShapes':
            shape_type = 'ft'
        template_values = timeseriesMethods.get_time_series(
            template_values, shape_type)
    elif toolAction in ('getMap', 'downloadRectangleSubset',
                        'showSingleValueOnMap', 'downloadFusionTableSubset'):
        template_values = mappingMethods.get_images(template_values)
    return template_values
Пример #10
0
def get_climatology(collection, product, variable, dateStart, dateEnd,
                    statistic, calculation, yearStartClim, yearEndClim,
                    logger=None):
    """Return the climatology image
    Args:
        collection: EarthEngine collection to process (has already selected variable)
        product: string of the product ()
        variable: string of the variable ()
        dateStart: string of the start date isoformat (YYYY-MM-DD)
        dateEnd: string of the end date isoformat (YYYY-MM-DD)
        statistic: string of the statistic (Mean, Median, Total, etc.)
        calculation: string of the calculation type
            (anom, value, anompercentof,anompercentchange,clim)
        yearStartClim: string of the climatology start year
        yearEndClim: string of the climatology end year
        logger: unused -- it is overwritten with 'get_images_debug' below
    Returns:
        Tuple of (EarthEngine climatology image, climatology collection,
        note string describing the averaging period, number of days in
        the day-of-year window)
    """
    #==============
    #NOTE(review): the caller-supplied logger is discarded here
    logger = loggerFunctions.set_logger('get_images_debug')

    yearStartClim = int(yearStartClim)
    yearEndClim = int(yearEndClim)

    #Build python datetime objects from the date string
    dateStart_dt = dt.datetime.strptime(dateStart, '%Y-%m-%d')
    dateEnd_dt = dt.datetime.strptime(dateEnd, '%Y-%m-%d')

    #==============
    #Check timedelta between start and end is greater than 1 year
    def yearsahead(years, start_date):
        """Return start_date advanced by `years` years (Feb 29 -> Feb 28)."""
        try:
            return start_date.replace(year=start_date.year + years)
        except ValueError:
            #Only Feb 29 can fail here (target year is not a leap year).
            #BUG FIX: this branch previously referenced the undefined name
            #'from_date', raising NameError for Feb 29 start dates.
            return start_date.replace(month=2, day=28,
                                      year=start_date.year + years)

    #Climo products will have less than a year to avg over too
    if dateEnd_dt <= yearsahead(1, dateStart_dt) or product in ['MACA', 'NASANEX']:
        sub_year_flag = False
        doyStart = dateStart_dt.timetuple().tm_yday
        doyEnd = dateEnd_dt.timetuple().tm_yday
        if doyStart < doyEnd:
            num_days = doyEnd - doyStart
        else:
            #DOY window wraps past Dec 31 (or spans a full year)
            num_days = (366 - doyStart) + (doyEnd - 1)
    else:
        sub_year_flag = True
        doyStart = 1
        doyEnd = 366
        num_days = 366
    #==============
    if sub_year_flag:
        #List sequence is inclusive (i.e. don't advance yearEnd)
        yearListClim = ee.List.sequence(yearStartClim, yearEndClim)
        num_years = yearEndClim - yearStartClim + 1
    else:
        yearListClim = ee.List.sequence(yearStartClim, yearEndClim - 1)  #list inclusive
        num_years = yearEndClim - 1 - yearStartClim + 1

    #==============Not technically correct.. takes min/max over a single year,
    #not over the whole time period; both have produced 429/500 errors before
    doy_filter = ee.Filter.calendarRange(doyStart, doyEnd, 'day_of_year')
    if statistic == 'Min':
        def min_climatology_func(year):
            """For each year, return an image of the minimum value over the DOY range"""
            year_filter = ee.Filter.calendarRange(year, year, 'year')
            return ee.Image(collection.filter(year_filter).filter(doy_filter).min())
        climatology_coll = ee.ImageCollection.fromImages(
            yearListClim.map(min_climatology_func))
        climatology_img = get_statistic(climatology_coll, 'Mean', logger=logger)
    elif statistic == 'Max':
        def max_climatology_func(year):
            """For each year, return an image of the maximum value over the DOY range"""
            year_filter = ee.Filter.calendarRange(year, year, 'year')
            return ee.Image(collection.filter(year_filter).filter(doy_filter).max())
        climatology_coll = ee.ImageCollection.fromImages(
            yearListClim.map(max_climatology_func))
        climatology_img = get_statistic(climatology_coll, 'Mean', logger=logger)
    #==============
    elif statistic in ('Mean', 'Total', 'Median'):
        #FilterDate needs an extra day on the high end; set yearEnd to Jan 1st of next year
        yearStartClimUTC = dt.datetime(yearStartClim, 1, 1)
        yearEndClimUTC = dt.datetime(yearEndClim + 1, 1, 1)
        #===================
        climatology_coll = collectionMethods.get_climatology_collection(product, variable)
        #NOTE(review): the 'G' year test (!=1981 or !=2010) is almost always
        #true -- it looks like the comparison years may be swapped; confirm
        if climatology_coll == 'None' or (product == 'G' and (yearEndClim != 1981 or yearStartClim != 2010)):  #no pre-calculated climos
            climatology_coll_temp = 'None'
            climatology_coll = collection.filterDate(
                yearStartClimUTC, yearEndClimUTC).filter(doy_filter)
        else:  #yes pre-calculated climos
            climatology_coll_temp = 'Some'
            if doyStart <= doyEnd:
                # Date range is in same year
                climatology_coll = climatology_coll.filter(ee.Filter.And(
                    ee.Filter.gte('doy', doyStart),
                    ee.Filter.lte('doy', doyEnd)))
            else:
                # Date range wraps around to next year
                climatology_coll = climatology_coll.filter(ee.Filter.Or(
                    ee.Filter.gte('doy', doyStart),
                    ee.Filter.lte('doy', doyEnd)))
        #===================
        climatology_img = get_statistic(
            climatology_coll, statistic, logger=logger)
        #A raw multi-year 'Total' sums across years; normalize to a per-year total
        if not sub_year_flag and statistic == 'Total' and climatology_coll_temp == 'None':
            climatology_img = climatology_img.divide(num_years)
    #NOTE(review): any other statistic leaves climatology_img/climatology_coll
    #unbound and the return below raises NameError (pre-existing behavior)

    climatologyNote = 'Average calculated from {0}-{1}'.format(str(yearStartClim), str(yearEndClim))

    return climatology_img, climatology_coll, climatologyNote, num_days
Пример #11
0
def get_images(template_values):
    """Build the requested Earth Engine image and return updated values.

    Reads the user request out of template_values, builds the collection,
    applies the calculation (value/anomaly/climatology/...), unit
    conversion and masking, generates a map id (and optionally a point
    value or a download URL), and returns the augmented dict TV.
    """
    #from forms import stateLat, stateLong

    #set up logger
    logger = loggerFunctions.set_logger('get_images_debug')
    # Shallow copy of the incoming template values; results are added to TV
    TV = {}
    for key, val in template_values.iteritems():
        TV[key] = val
    product = TV['product']
    var = TV['variable']
    model = TV['model']
    scenario = TV['scenario']
    calculation = TV['calculation']
    toolAction = TV['toolAction']
    yearStartClim = TV['yearStartClim']
    yearEndClim = TV['yearEndClim']
    statistic = TV['statistic']
    units = TV['units']
    palette = TV['palette']
    minColorbar = template_values['minColorbar']
    maxColorbar = template_values['maxColorbar']
    colorbarType = template_values['colorbarType']
    colorbarmap = template_values['colorbarmap']
    colorbarTicks = template_values['colorbarTicks']
    scale = TV['scale']
    downloadFilename = TV['downloadFilename']
    mask = TV['mask']
    # maskMin/maskMax exist only when a mask was requested
    if mask !='none':
        maskMin = TV['maskMin']
        maskMax = TV['maskMax']

    # Build EarthEngine date objects from date strings and explicitly set GMT
    # Note, by default EarthEngine date objects are already GMT
    if product in ['MACA', 'NASANEX']:
        monthList = [
            '', 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul',
            'Aug', 'Sept', 'Oct', 'Nov', 'Dec']
        yearStartClimFut = TV['yearStartClimFut']
        yearEndClimFut = TV['yearEndClimFut']
        yearStartClim = TV['yearStartClim']
        yearEndClim = TV['yearEndClim']
        #these are for calculating the source title
        if calculation in ['anom', 'anompercentof', 'anompercentchange', 'percentile','value']:
            dS = yearStartClimFut+'-'+TV['monthStartFut']+'-'+TV['dayStartFut']
            dE = yearEndClimFut+'-'+TV['monthEndFut']+'-'+TV['dayEndFut']
        else:
            dS = yearStartClim+'-'+TV['monthStartFut']+'-'+TV['dayStartFut']
            dE = yearEndClim+'-'+TV['monthEndFut']+'-'+TV['dayEndFut']
        #just need to get the doyfilter right with these
        dSFut = yearStartClimFut+'-'+TV['monthStartFut']+'-'+TV['dayStartFut']
        dSHist = yearStartClim+'-'+TV['monthStartFut']+'-'+TV['dayStartFut']
        # When the end month precedes the start month the window wraps into
        # the next calendar year
        if int(TV['monthEndFut']) > int(TV['monthStartFut']):
            dEFut = yearStartClimFut+'-'+TV['monthEndFut']+'-'+TV['dayEndFut']
            dEHist = yearStartClim+'-'+TV['monthEndFut']+'-'+TV['dayEndFut']
        else:
            dEFut = str(int(yearStartClimFut)+1)+'-'+TV['monthEndFut']+'-'+TV['dayEndFut']
            dEHist = str(int(yearStartClim)+1)+'-'+TV['monthEndFut']+'-'+TV['dayEndFut']
        dSUTC = ee.Date(dSHist, 'GMT')
        dEUTC = ee.Date(dEHist, 'GMT')

    else:
        dS = TV['dateStart']
        dE = TV['dateEnd']
        dSUTC = ee.Date(dS, 'GMT')
        dEUTC = ee.Date(dE, 'GMT')

    #==============
    #Initial Collection
    #==============
    frequency='daily'
    if product=='MACA' or product=='NASANEX':
        frequency='monthly'
    collection, coll_name, coll_desc, var_desc, notes = collectionMethods.get_collection(
        product, var, model, scenario, frequency,logger=logger)
    #if product =='NVET':
     #   frequency='yearly'
    #collection, coll_name, coll_desc, notes = collectionMethods.get_collection(
    #    product, var, model, scenario, frequency,logger=logger)

    #==============
    #Calculation:Values,Climatology,Anomalies
    #==============
    # NOTE(review): a calculation value outside the two branches below leaves
    # calc_img (and climatologyNotes) unbound -- confirm upstream validation
    #get_statistic returns ee.ImageCollection
    if calculation in ['value']:
        if product in ['MACA', 'NASANEX']:
            # This is future average
            # FilterDate is exclusive on the high end, include an extra day on dEUTC
            collection = collection.filterMetadata('scenario', 'equals', scenario)
            calc_img,clim_temp,clim_notes_temp,num_days = get_climatology(
                  collection, product, var, dSFut, dEFut, statistic,
                  calculation, yearStartClimFut, yearEndClimFut, logger=logger)
        elif product == 'CFSV2':  #convert 6hrly to daily
            collection = collection_CFSV2.convert_6hrly_to_daily(
                collection, var, dSUTC, dEUTC, logger)
            calc_img = get_statistic(collection, statistic, logger=logger)
        else:
            # filterDate is exclusive on the high end; advance one day
            collection = collection.filterDate(dSUTC, dEUTC.advance(1,'day'))
            calc_img = get_statistic(collection, statistic, logger=logger)
    elif calculation in ['anom', 'anompercentof', 'anompercentchange', 'clim', 'percentile','zscore']:
        if product in ['MACA', 'NASANEX']:   #this is historical average
            hist_coll = collection.filterMetadata('scenario', 'equals', 'historical')
            clim_img, clim_coll, climatologyNotes,num_days = get_climatology(
                hist_coll, product, var, dSHist, dEHist, statistic,
                calculation, yearStartClim, yearEndClim, logger=logger)
        else:
            clim_img, clim_coll, climatologyNotes,num_days = get_climatology(
                collection, product, var, dS, dE, statistic,
                calculation, yearStartClim, yearEndClim, logger=logger)

        if calculation in ['clim']:
            calc_img = clim_img
        else:
            # anomaly = future wrt historical
            if product in ['MACA', 'NASANEX']:
                # This is future average
                future_col = collection.filterMetadata('scenario', 'equals', scenario)
                calc_img, clim_temp, clim_notes_temp,num_day = get_climatology(
                    future_col, product, var, dSFut, dEFut, statistic,
                    calculation, yearStartClimFut, yearEndClimFut, logger=logger)
                calc_img = get_anomaly(calc_img, clim_img, clim_coll, calculation,statistic,num_days)
            else:
                collection = collection.filterDate(dSUTC, dEUTC.advance(1, 'day'))
                calc_img = get_statistic(collection, statistic, logger=logger)
                calc_img = get_anomaly(calc_img, clim_img, clim_coll, calculation,statistic,1)

        TV['climatologyNotes'] = climatologyNotes

    #==============
    #Units
    #==============
    calc_img = modify_units(calc_img, var, product, calculation, units)

    #==============
    #Extra template values
    #==============
    extra_template_values = {
    }

    #==============
    #Apply Mask
    #==============
    # Keep only pixels inside/outside the requested value band
    if mask != 'none':
        maskMin = float(maskMin)
        maskMax = float(maskMax)
        if mask == 'above':
            calc_img = ee.Image(calc_img).mask(calc_img.lte(maskMax))
        elif mask == 'below':
            calc_img = ee.Image(calc_img).mask(calc_img.gte(maskMin))
        elif mask == 'exterior':
            calc_img = ee.Image(calc_img).mask(calc_img.lte(maskMin).Or(calc_img.gte(maskMax)))
        elif mask == 'interior':
            calc_img = ee.Image(calc_img).mask(calc_img.gte(maskMin).And(calc_img.lte(maskMax)))

    #==============
    #Get mapid
    #==============
    mapid = {'mapid':[], 'token':[]}
    ## Limit the image values to the min and max before visualizing
    ##here calc_img = ee.Image()
    vis_image = calc_img.clamp(float(minColorbar), float(maxColorbar))

    #we could change this to just if var in ['TrueColor', 'FalseColor'] instead of all the products
    if (product in ['L_TOA', 'L5_TOA', 'L7_TOA', 'L8_TOA', 'L_SR', 'L5_SR', 'L7_SR', 'L8_SR', 'NVET'] and
        var in ['TrueColor', 'FalseColor']):
        ## Hard code display of multi-band Landsat images (for now)
        ## Intentionally not using "vis_image" here to avoid clamp (for now)
        mapid = map_image(calc_img, TV['opacity'], None, minColorbar, maxColorbar)
        ## Bands can be set separately by passing a string of comma separate numbers
        ##mapid = map_image(calc_img, TV['opacity'], None, "0, 0, 0", "0.28, 0.28, 0.32")
    #elif ((colorbarmap == 'invUSDM' or colorbarmap == 'USDM') and calculation == 'percentile'):
    #    value_list = [3,6,10,20,30,40]
    #    mapid = map_image(apply_sld_colormap_styling_image(vis_image, palette, colorbarType, value_list))
    #elif ((colorbarmap == 'invUSDMwWet' or colorbarmap == 'USDMwWet') and
    #      calculation == 'percentile'):
    #    value_list = [3,6,10,20,30,70,80,90,94,97,100]
    #    mapid = map_image(apply_sld_colormap_styling_image(vis_image, palette, colorbarType, value_list))
    elif calculation == 'anompercentof':
        # First tick is dropped; remaining ticks define the SLD intervals
        value_list = [int(i) for i in colorbarTicks.split(",")][1:]
        #value_list = [5,25,50,70,90,110,130,150,200,400,800]
        mapid = map_image(apply_sld_colormap_styling_image(vis_image, palette, colorbarType,value_list))
    elif colorbarType == 'continuous':
        mapid = map_image(
            vis_image, TV['opacity'], palette, minColorbar, maxColorbar)
    elif colorbarType == 'discrete':
        palette_list = palette.split(',')
        ## Intentionally add 1 to x since value needs to be for the max of the interval
        value_list = [
           float(minColorbar) + float(x + 1) * (float(maxColorbar) - float(minColorbar)) / len(palette_list)
           for x in xrange(len(palette_list))]
        mapid = map_image(apply_sld_colormap_styling_image(vis_image, palette, colorbarType,value_list))

    if mapid and mapid['mapid'] and mapid['token']:
        extra_template_values['mapid'] = mapid['mapid']
        extra_template_values['token'] = mapid['token']
        TV.update(extra_template_values)

    #==============
    #Get point value
    #==============
    if toolAction == 'showSingleValueOnMap':
        point_value = get_point_value(
            calc_img, float(TV['pointLat']), float(TV['pointLong']), var)
        extra_template_values['pointValue'] = '{0:0.4f}'.format(point_value)
        TV.update(extra_template_values)

    #==============
    #Region data extraction
    #==============
    if toolAction == 'downloadRectangleSubset' or toolAction=='downloadFusionTableSubset':
        #the rect coordinates here have been set for either rect or bounding box of fusion table
        NELat = TV['NELat']
        NELong = TV['NELong']
        SWLat = TV['SWLat']
        SWLong = TV['SWLong']
        # GeoJSON-style ring of the four corners (as a string)
        rectangle = '[[' + SWLong + ',' + NELat + '],' + \
                     '[' + NELong + ',' + NELat + '],' + \
                     '[' + NELong + ',' + SWLat + '],' + \
                     '[' + SWLong + ',' + SWLat + ']]'
        downloadMapFormat =TV['downloadMapFormat']
        projection = TV['downloadProjection']

        if toolAction =='downloadFusionTableSubset':
            fusiontable = TV['fusiontabledownload']
            fusiontablename = TV['fusiontabledownloadname']

            region = ee.FeatureCollection('ft:'+fusiontable)
            if fusiontablename:
                region = region.filter(ee.Filter.eq('Name', fusiontablename))
            # Clip to the feature and use its bounding box as the region
            calc_img = calc_img.clip(region.geometry())
            rectangle = json.dumps(region.geometry().bounds().getInfo()['coordinates'])

        downloadOptions = {
           'name':downloadFilename,
           'scale':scale,
           'crs':projection,
           'region':rectangle,
           'maxPixels': 1e9,
           #'format':downloadMapFormat
        }

        #this is a way to get .tif to be non-blank
        #if downloadMapFormat=='png' or downloadMapFormat=='jpg' or downloadMapFormat=='tif':
        #    vis_image = calc_img.visualize(
        #        bands=var, min=float(minColorbar), max=float(maxColorbar),
        #        palette=palette.split(',')) #palette must be array of strings, not a string
        #    downloadURL = vis_img.getDownloadUrl(downloadOptions)
        #elif downloadMapFormat=='tif':
        downloadURL = calc_img.getDownloadUrl(downloadOptions)

        # getDownloadURL is the preferred "spelling" for this function
        # Switch at some point when everyone updates their earthengine-api
        #downloadURL = ???Export.Image(,'title',{'region': rectangle})
        extra_template_values['downloadURL'] = downloadURL
        TV.update(extra_template_values)

    #==============
    #Update template values
    #==============
    return TV
Пример #12
0
def set_initial_template_values(self, applicationName):
    """Assemble the initial template-variable dict for a page request.

    Reads request parameters (via get_variable / add_templateVariable),
    merges application defaults, memcached max dates, form objects and
    per-application overrides, then runs each value through an optional
    forms.format_<key> formatter.

    Args:
        self: webapp request handler (supplies self.request form values).
        applicationName: application key, e.g. 'fewsNet',
            'precisionGrazing', 'gddTool'.
    Returns:
        Dict of formatted template values.
    """
    import loggerFunctions
    logger = loggerFunctions.set_logger('templatevariables')

    # Override saved maxDates with memcache values.
    # Bug fix: take a copy -- the original aliased the shared module-level
    # collection_dataStore.defaults_maxDates dict and mutated it in place,
    # leaking memcache overrides from one request into the defaults.
    maxDates = dict(collection_dataStore.defaults_maxDates)
    for key, value in maxDates.iteritems():
        try:
            data = memcache.get(key)
            if data is not None:
                maxDates[key] = data
        except:
            # memcache failures are non-fatal; keep the saved default
            pass

    timeSeriesCalc_default,subDomainTypeTS_default,ftChoice1_default,variable_default,productType_default,product_default,statistic_default,calculation_default,units_default,varUnits_default,colorbar_default,colorbarMin_default,colorbarMax_default,colorbarSize_default,colorbarTicks_default,timeperiod_default,timeperiod_days,scale_default,mapCenterLongLat_default,pointLat_default ,pointLon_default,mapCenterLongLat,mapzoom_default,opacity_default,NELat_default,NELong_default,SWLat_default,SWLong_default,minYear_default,minDate_default,runningMeanYears_default,runningMeanDays_default,basemap_default,CHIRPSvariable_default =application_defaults.get_applicationDefaults(self, applicationName)

    #Set month/day/Start/End defaults according to dateStart/End
    #Set tempend (latest available date for the requested product/variable)
    variable = get_variable(self, 'variable', variable_default)
    product = get_variable(self, 'product', product_default)
    if variable in collection_dataStore.names_notdefault:
        variablename = variable
    else:
        variablename = 'default'
    tempend = maxDates[collection_dataStore.names_memcache[product][variablename]]
    #Set tempstart: timeperiod_days before tempend
    tempstart = (datetime.datetime(int(tempend[0:4]),int(tempend[5:7]),int(tempend[8:10]))-datetime.timedelta(days=timeperiod_days)).strftime('%Y-%m-%d')
    #Set Month/Day defaults (leading zeros stripped for the form widgets)
    monthStart_default = tempstart[5:7].lstrip('0')
    dayStart_default = tempstart[8:10].lstrip('0')
    monthEnd_default = tempend[5:7].lstrip('0')
    dayEnd_default = tempend[8:10].lstrip('0')


    toolAction =get_variable(self,'toolAction','getMap')
    if(toolAction=='getTimeSeriesOverDateRange'): #timeseries
        variable =get_variable(self,'variableTS',variable_default)
        variable2display =get_variable(self,'variable2display','none')
        productType =get_variable(self,'productTypeTS',productType_default)
        product =get_variable(self,'productTS',product_default)
        statistic =get_variable(self,'statisticsTS',statistic_default)
        calculation =get_variable(self,'calculation',calculation_default)
        units =get_variable(self,'unitsTS',units_default)
        varUnits =get_variable(self,'varUnitsTS',varUnits_default)
        timeperiod =get_variable(self,'timeperiodTS',timeperiod_default)
        if variable2display!='none': #2 variables
            productType2 =get_variable(self,'productType2TS',productType_default)
            product2 =get_variable(self,'product2TS',product_default)
            variable2 =get_variable(self,'variable2TS',variable_default)
            statistic2 =get_variable(self,'statistic2TS',statistic_default)
            varUnits2 =get_variable(self,'var2UnitsTS',varUnits_default)
            timeperiod2 =get_variable(self,'timeperiod2TS',timeperiod_default)
        else: #1 variable: mirror variable 1's settings
            productType2=productType
            product2 = product
            variable2=variable
            statistic2=statistic
            varUnits2=varUnits
            timeperiod2=timeperiod
    else: #mapping
        variable =get_variable(self,'variable',variable_default)
        productType =get_variable(self,'productType',productType_default)
        product =get_variable(self,'product',product_default)
        statistic =get_variable(self,'statistic',statistic_default)
        calculation =get_variable(self,'calculation',calculation_default)
        units =get_variable(self,'units',units_default)
        varUnits =get_variable(self,'varUnits',varUnits_default)
        timeperiod =get_variable(self,'timeperiod',timeperiod_default)
        productType2=productType
        product2 = product
        variable2=variable
        statistic2=statistic
        varUnits2=varUnits
        timeperiod2=timeperiod

    template_values={
        'applicationName':applicationName,
        'form_error': {},
         #Sharelink breaks if get_all is used since lists can not be embedded in urls
        'layer': self.request.get_all('layer',[])
    }
    list_baseoptions={
        'mask':                'none',
        'maskMin':             '',
        'maskMax':             '',
        'downloadURL':         '',
        'downloadFilename':    'climateEngine_download',
        'downloadregion':      'noRegion',
        'downloadProjection':   'EPSG:4326',
        'mapid':                '',
        'token':                '',
        'dispEnv':              '',
        #Variable Options
        'toolAction':           'getMap',
        'variable':             variable,
        'variableTS':           variable,
        'variable2TS':          variable2,
        'variable2display':     'none',
        'productType':          productType,
        'productTypeTS':        productType,
        'productType2TS':       productType2,
        'product':              product,
        'productTS':            product,
        'product2TS':           product2,
        'statistic':            statistic,
        'statisticTS':          statistic,
        'statistic2TS':         statistic2,
        'calculation':          calculation,
        'units':                units,
        'unitsTS':              units,
        'varUnits':             varUnits,
        'varUnitsTS':           varUnits,
        'var2UnitsTS':          varUnits2,
        'timeperiod':           timeperiod,
        #Map Options
        'opacity':              opacity_default,
        'mapCenterLongLat':     mapCenterLongLat_default,
        'mapzoom':              mapzoom_default,
        #Get Map Options
        'kmlurl':               '',
        'kmlurl2':              '',
        'kmloption':            '',
        'scale':                scale_default,
        'NELat':                NELat_default,
        'NELong':               NELong_default,
        'SWLat':                SWLat_default,
        'SWLong':               SWLong_default,
        #Colorbar Options
        'palette':              '',
        'minColorbar':         colorbarMin_default,
        'maxColorbar':         colorbarMax_default,
        'colorbarmap':         colorbar_default,
        'colorbarsize':        colorbarSize_default,
        'colorbarLabel':       '',  #technically this is dependent on variable(should ditch)
        'colorbarType':        'continuous',
        'colorbarTicks':       colorbarTicks_default,
        #TimeSeries Options
        'timeSeriesCalc':      timeSeriesCalc_default,
        'chartType':           '',
        'subDomainTypeTS':     subDomainTypeTS_default,
        'basemap':             basemap_default
    }
    add_templateVariable(self,list_baseoptions,template_values)
    #############################################
    ##     FEWSNET OPTIONS                     ##
    #############################################
    if applicationName =='fewsNet' or applicationName=='precisionGrazing' or applicationName=='gddTool':
        # Bug fix: removed trailing comma that turned the value into a
        # 1-tuple instead of the default string.
        template_values['CHIRPSvariable'] = CHIRPSvariable_default
    #############################################
    ##      MACA OPTIONS                     ##
    #############################################
    list_MACA={
        'model':           'inmcm4',
        'scenario':        'historical',
        'modelTS':         'inmcm4',
        'scenarioTS':      'historical',
        'model2TS':        'inmcm4',
        'scenario2TS':     'historical'
    }
    add_templateVariable(self,list_MACA,template_values)

    #############################################
    ##      TEMPLATE TIME OPTIONS              ##
    #############################################
    # NOTE(review): this loop rebinds `product`, yearStartClim, yearEndClim,
    # yearStartClimFut and yearEndClimFut on every pass, so after the loop
    # they reflect the 'product2' request value -- confirm that is intended.
    for p in ['product', 'product2']:
        product = self.request.get(p, product_default)
        yearStartClim= collection_dataStore.defaults_yearClim[product]['yearStartClim']
        yT= collection_dataStore.defaults_yearClim[product]['yearTarget']
        yearEndClim=collection_dataStore.defaults_yearClim[product]['yearEndClim']
        if yearEndClim=='default':
            yearEndClim=tempend[0:4]
        yearStartClimFut = collection_dataStore.defaults_yearClim['MACA']['yearStartClimFut']
        yearEndClimFut = collection_dataStore.defaults_yearClim['MACA']['yearEndClimFut']
        if p == 'product':
            yearTarget = yT
        if p == 'product2':
            yearTarget2 = yT

    mon_names = ['Jan','Feb','Mar','Apr','May','Jun','Jul','Aug','Sep','Oct','Nov','Dec']

    maxYear_default = tempend[0:4]
    maxDate_default = tempend

    list_timeoptions={
        'minYear':          minYear_default,
        'minDate':          minDate_default,
        'maxDate':          maxDate_default,
        'maxYear':          maxYear_default,
        'dateStart':        tempstart,
        'dateEnd':          maxDate_default,
        'dateStartTS':      tempstart,
        'dateEndTS':        maxDate_default,
        'yearTargetData':   yearTarget,
        'yearTargetFigure': yearTarget,
        'yearTargetForm':   yearTarget,
        'yearStart':        yearStartClim,
        'yearEnd':          yearEndClim,
        'monthStart':       monthStart_default,
        'monthEnd':         monthEnd_default,
        'dayStart':         dayStart_default,
        'dayEnd':           dayEnd_default,
        'season':           'custom',
        'monthStartFut':    monthStart_default,
        'monthEndFut':      monthEnd_default,
        'dayStartFut':      dayStart_default,
        'dayEndFut':        dayEnd_default,
        'seasonFut':        'custom',
        'minYear2':         minYear_default,
        'minDate2':         minDate_default,
        'maxDate2':         maxDate_default,
        'maxYear2':         maxYear_default,
        'dateStart2TS':     tempstart,
        'dateEnd2TS':       maxDate_default,
        'yearTarget2Data':  yearTarget2,
        'yearStart2':       yearStartClim,
        'yearEnd2':         yearEndClim,
        'monthStart2':      monthStart_default,
        'monthEnd2':        monthEnd_default,
        'dayStart2':        dayStart_default,
        'dayEnd2':          dayEnd_default,
        'season2':          'custom',
        'runningMeanYears': runningMeanYears_default,
        'runningMeanDays':  runningMeanDays_default,
        'timeperiodTS':     timeperiod,
        'timeperiod2TS':    timeperiod2,
        'yearStartClim':    yearStartClim,
        'yearEndClim':      yearEndClim,
        'yearStartClimFut': yearStartClimFut,
        'yearEndClimFut':   yearEndClimFut
    }
    add_templateVariable(self,list_timeoptions,template_values)

    #add maxValue form elements
    for key, value in collection_dataStore.maxDates_lookup.iteritems():
        add_templateVariable(self,{value:maxDates[key]},template_values)

    # Human-readable season labels, e.g. 'Jan 01'
    d = ' %02d' %int(template_values['dayStart'])
    template_values['seasonStart'] = mon_names[int(template_values['monthStart']) -1] + d
    d = ' %02d' %int(template_values['dayEnd'])
    template_values['seasonEnd'] = mon_names[int(template_values['monthEnd']) -1] + d
    d = ' %02d' %int(template_values['dayStart2'])
    template_values['seasonStart2'] = mon_names[int(template_values['monthStart2']) -1] + d
    d = ' %02d' %int(template_values['dayEnd2'])
    template_values['seasonEnd2'] = mon_names[int(template_values['monthEnd2']) -1] + d

    #if Time Series of Single Year Daily, override start/end dates if needed
    #(intraannual plots cover at most one target year)
    if template_values['timeSeriesCalc'] == 'intraannual':
        s_dt = datetime.datetime.strptime(template_values['dateStart'],'%Y-%m-%d')
        e_dt = datetime.datetime.strptime(template_values['dateEnd'],'%Y-%m-%d')
        if (e_dt - s_dt).days > 366:
            y = template_values['yearTargetData']
            m = template_values['monthStart']
            d = template_values['dayStart']
            template_values['dateStartTS'] = y + '-' + m + '-' + d
            template_values['dateEndTS'] = str(int(y) + 1) + '-' + m + '-' + d
            template_values['dateStart'] = template_values['dateStartTS']
            template_values['dateEnd'] = template_values['dateEndTS']
        # Bug fix: always re-evaluate the second variable's date range.
        # These two lines were previously nested inside the first `if`, so
        # when variable 1's range was <= 366 days the check below reused
        # variable 1's dates and could wrongly clobber variable 2's.
        s_dt = datetime.datetime.strptime(template_values['dateStart2TS'],'%Y-%m-%d')
        e_dt = datetime.datetime.strptime(template_values['dateEnd2TS'],'%Y-%m-%d')
        if (e_dt - s_dt).days > 366:
            y = template_values['yearTarget2Data']
            m = template_values['monthStart2']
            d = template_values['dayStart2']
            # Bug fix: write 'dateStart2TS' to mirror variable 1's handling
            # (the original wrote 'dateStart2', a key used nowhere else).
            template_values['dateStart2TS'] = y + '-' + m + '-' + d
            template_values['dateEnd2TS'] = str(int(y) + 1) + '-' + m + '-' + d


    #############################################
    ##      TEMPLATE FUSION TABLE MAP DOWNLOAD       ##
    #############################################
    list_fusiontablemapdownload={
         'fusiontabledownload':      '1fRY18cjsHzDgGiJiS2nnpUU3v9JPDc2HNaR7Xk8',
         'fusiontabledownloadname':   'California',
         'pointLat':                   pointLat_default,
         'pointLong':                  pointLon_default,
         'pointValue':                 '',
         'downloadMapFormat':          'geotiff',
    }
    add_templateVariable(self,list_fusiontablemapdownload,template_values)

    #############################################
    ##      TEMPLATE POINTS       ##
    #############################################
    pointsLongLat_default=mapCenterLongLat
    marker_colors=['blue','green','orange','purple','yellow','pink','red']
    mapCenterLongLat = template_values['mapCenterLongLat']
    template_values['marker_colors']=marker_colors
    add_templateVariable(self,{'pointsLongLat':pointsLongLat_default},template_values)
    for s_idx in range(1,max_pointsShapes+1):
        add_templateVariable(self,{'p'+str(s_idx)+'check':'checked'},template_values)
        add_templateVariable(self,{'p'+str(s_idx)+'altname':''},template_values)
        add_templateVariable(self,{'p'+str(s_idx):mapCenterLongLat},template_values)
        # Only the first point row is shown initially
        if s_idx==1:
            add_templateVariable(self,{'p'+str(s_idx)+'display':'block'},template_values)
        else:
            add_templateVariable(self,{'p'+str(s_idx)+'display':'none'},template_values)


    #############################################
    ##      TEMPLATE SHAPES-FUSION TABLE FOR TIME SERIES       ##
    #############################################
    template_values['shape_colors']=['#0000FF','#00FF00','#FFA500','#551A8B','#FFFF00','#FF69B4','#FF0000']
    for s_idx in range(1,max_pointsShapes+1):
        add_templateVariable(self,{'ft'+str(s_idx):''},template_values)
        add_templateVariable(self,{'ft'+str(s_idx)+'columnName':''},template_values)
        add_templateVariable(self,{'ft'+str(s_idx)+'altname':''},template_values)
        add_templateVariable(self,{'ftSubChoice'+str(s_idx):''},template_values)
        add_templateVariable(self,{'polygon'+str(s_idx):''},template_values)
        # Only the first fusion-table row is shown/checked initially
        if s_idx==1:
            add_templateVariable(self,{'ft'+str(s_idx)+'display':'block'},template_values)
            add_templateVariable(self,{'ft'+str(s_idx)+'check':'checked'},template_values)
            add_templateVariable(self,{'ftChoice'+str(s_idx):ftChoice1_default},template_values)
        else:
            add_templateVariable(self,{'ft'+str(s_idx)+'display':'none'},template_values)
            add_templateVariable(self,{'ft'+str(s_idx)+'check':''},template_values)
            add_templateVariable(self,{'ftChoice'+str(s_idx):''},template_values)

    #############################################
    ##      TEMPLATE FORMS       ##
    #############################################
    template_forms={
        #Forms
        'formMask':                      forms.formMask,
        'formVariable2Display':          forms.formVariable2Display,
        'formTimePeriods':               forms.formTimePeriods,
        'formHighChartLayers':           forms.formHighChartLayers,
        'formHighChartLayersIntraannual':forms.formHighChartLayersIntraannual,
        'formDownloadMapFormat':         forms.formDownloadMapFormat,
        'formDownloadProjection':        forms.formDownloadProjection,
        'formSeasons':                   forms.formSeasons,
        'formChartType':                 forms.formChartType,
        'formMonth':                     forms.formMonth,
        'formDay':                       forms.formDay,
        'formMapZoom':                   forms.formMapZoom,
        'formPaletteCustomMap':          forms.formPaletteCustomMap,
        'formPaletteDivMap':             forms.formPaletteDivMap,
        'formPaletteSeqMap':             forms.formPaletteSeqMap,
        'formPaletteSize':               forms.formPaletteSize,
        'formColorbarType':              forms.formColorbarType,
        'formOpacity':                   forms.formOpacity,
        'formUnits':                     forms.formUnits,
        'formTimeSeriesCalc':            forms.formTimeSeriesCalc,
        'formSubDomainTypeTS':           forms.formSubDomainTypeTS,
        'formDownloadRegion':            forms.formDownloadRegion,
        'formFusionTableChoices':        forms.formFusionTableChoices,
        'formProductType':               forms.formProductType,
        'formProductType1':              forms.formProductType1,
        'formLayers':                    forms.formLayers,
        #'formLayers1':                   forms.formLayers1,
        'formBoolean':                   forms.formBoolean
    }
    template_values.update(template_forms)

    #############################################
    ##      EXTRA FEWS                 ##
    #############################################
    # Application-specific form overrides
    if applicationName=='fewsNet':
        extra_FEWS ={
            'formLayers':                formsFEWSNET.formFEWSNETLayers,
            'formFusionTableChoices':    formsFEWSNET.formFusionTableChoicesFEWSNET,
            'formCHIRPSChoicesFEWSNET':  formsFEWSNET.formCHIRPSChoicesFEWSNET,
            'formMODISChoicesFEWSNET':   formsFEWSNET.formMODISChoicesFEWSNET,
            'formLandsatChoicesFEWSNET': formsFEWSNET.formLandsatChoicesFEWSNET,
        }
        template_values.update(extra_FEWS)
    elif applicationName=='precisionGrazing' or applicationName=='gddTool':
        extra_FEWS ={
            'formLayers':                formsPG.formPGLayers,
            'formFusionTableChoices':    formsPG.formFusionTableChoicesPG,
            'formCHIRPSChoicesFEWSNET':  formsFEWSNET.formCHIRPSChoicesFEWSNET,
            'formMODISChoicesFEWSNET':   formsFEWSNET.formMODISChoicesFEWSNET,
            'formLandsatChoicesFEWSNET': formsFEWSNET.formLandsatChoicesFEWSNET,
            'formSubDomainTypeTS':       formsPG.formSubDomainTypeTS
        }
        template_values.update(extra_FEWS)


    #############################################
    ##      SHARE LINK                         ##
    #############################################
    #Sharelink depends on most template variables
    template_values['logger'] = logger
    template_values['shareLink'] = set_share_link(template_values,applicationName)
    #############################################
    ##      FORMAT TEMPLATE VARIABLES          ##
    #############################################
    #format template values to allow for different date formats etc...
    #See format_ functions in formchecks.py
    formatted_template_values = {}
    for key, val in template_values.iteritems():
        # Idiom fix: getattr with a default instead of try/except
        format_function = getattr(forms, 'format_' + key, None)
        if format_function:
            formatted_template_values[key] = format_function(val)
        else:
            formatted_template_values[key] = val
    return formatted_template_values
Пример #13
0
def processPointData(template_values, extra_template_values, time_vars,
                     point_info, logger, timeSeriesTextData,
                     timeSeriesGraphData, varnum):
    """Process one point's raw thread data into text and graph records.

    Appends one record each to timeSeriesTextData and timeSeriesGraphData
    and, for intraannual runs, stores JSON-encoded climatology/percentile
    data in extra_template_values (keyed by varnum).

    Returns:
        (extra_template_values, timeSeriesTextData, timeSeriesGraphData)
    """
    # The passed-in logger is replaced with the shared 'info' logger.
    logger = loggerFunctions.set_logger('info')

    point_name = point_info['name']
    point_altname = point_info['altname']
    point_color = point_info['marker_color']

    # Fresh output records for this point
    ts_record = formatData.initialize_timeSeriesTextDataDict(
        point_name, altname=point_altname)
    graph_record = formatData.initialize_timeSeriesGraphDataDict(
        point_name, point_color, altname=point_altname)

    climoData = []
    percentileData = []
    # Dispatch on the requested time-series calculation mode
    calc_mode = template_values['timeSeriesCalc']
    if calc_mode == 'days':
        ts_record['Data'], graph_record['Data'] = process_daily_threadData(
            point_info['data'], template_values, varnum)
    elif calc_mode == 'interannual':
        ts_record['Data'], graph_record['Data'] = \
            process_interannual_threadData(
                point_info['data'], template_values,
                time_vars['seasonStart_doy'], time_vars['seasonEnd_doy'],
                varnum, logger)
    elif calc_mode == 'intraannual':
        # Per-year dicts keyed by year, plus climo/percentile envelopes
        yr_first = time_vars['yearStart']
        yr_last = time_vars['yearEnd']
        per_year_ts, per_year_graph, climoData, percentileData = \
            process_intraannual_threadData(
                point_info['data'], template_values,
                time_vars['doyStart'], time_vars['doyEnd'],
                yr_first, yr_last, varnum, logger)
        for yr in range(int(yr_first), int(yr_last) + 1):
            ts_record['Data'].append(per_year_ts[yr])
            graph_record['Data'].append(per_year_graph[yr])

    timeSeriesTextData.append(ts_record)
    timeSeriesGraphData.append(graph_record)

    # Intraannual extras are stored per variable number (1 or 2)
    if climoData:
        if varnum == 1:
            extra_template_values['climoData'] = json.dumps(climoData)
        elif varnum == 2:
            extra_template_values['climoData2'] = json.dumps(climoData)
    if percentileData:
        if varnum == 1:
            extra_template_values['percentileData'] = json.dumps(
                percentileData)
        elif varnum == 2:
            extra_template_values['percentileData2'] = json.dumps(
                percentileData)
    return extra_template_values, timeSeriesTextData, timeSeriesGraphData
Пример #14
0
def get_climatology(collection,
                    product,
                    variable,
                    dateStart,
                    dateEnd,
                    statistic,
                    calculation,
                    yearStartClim,
                    yearEndClim,
                    logger=None):
    """Return the climatology image
    Args:
        collection: EarthEngine collection to process (has already selected variable)
        product: string of the product ()
        variable: string of the variable ()
        dateStart: string of the start date isoformat (YYYY-MM-DD)
        dateEnd: string of the end date isoformat (YYYY-MM-DD)
        statistic: string of the statistic (Mean, Median, Total, Min, Max)
        calculation: string of the calculation type
            (anom, value, anompercentof,anompercentchange,clim)
            (currently unused here; kept for interface compatibility)
        yearStartClim: string of the climatology start year
        yearEndClim: string of the climatology end year
        logger: ignored; replaced by the 'get_images_debug' logger below
    Returns:
        Tuple of (climatology image, climatology collection,
        climatology note string, number of days in the DOY window).
    """
    #==============
    logger = loggerFunctions.set_logger('get_images_debug')

    yearStartClim = int(yearStartClim)
    yearEndClim = int(yearEndClim)

    #Build python datetime objects from the date string
    dateStart_dt = dt.datetime.strptime(dateStart, '%Y-%m-%d')
    dateEnd_dt = dt.datetime.strptime(dateEnd, '%Y-%m-%d')

    #==============
    #Check timedelta between start and end is greater than 1 year
    def yearsahead(years, start_date):
        """Return start_date advanced by `years`, clamping Feb 29 to Feb 28."""
        try:
            return start_date.replace(year=start_date.year + years)
        except ValueError:
            # Bug fix: the original referenced an undefined name `from_date`
            # here, raising NameError whenever start_date was Feb 29 and the
            # target year was not a leap year. Clamp to Feb 28 instead.
            return start_date.replace(month=2,
                                      day=28,
                                      year=start_date.year + years)

    #Climo products will have less than a year to avg over too
    if dateEnd_dt <= yearsahead(1, dateStart_dt) or product in [
            'MACA', 'NASANEX'
    ]:
        # Requested span is within one year: restrict to a DOY window
        sub_year_flag = False
        doyStart = dateStart_dt.timetuple().tm_yday
        doyEnd = dateEnd_dt.timetuple().tm_yday
        dayStart = dateStart[5:]
        dayEnd = dateEnd[5:]
        if (doyStart < doyEnd):
            num_days = len(range(doyStart, doyEnd))
        else:
            # DOY window wraps around the end of the year
            num_days = len(range(doyStart, 366)) + len(range(1, doyEnd))
    else:
        # Multi-year span: use the whole year
        sub_year_flag = True
        doyStart = 1
        doyEnd = 366
        num_days = 366
    #==============
    if sub_year_flag:
        #List sequence is inclusive (i.e. don't advance yearEnd)
        yearListClim = ee.List.sequence(yearStartClim,
                                        yearEndClim)  #list inclusive
        num_years = yearEndClim - yearStartClim + 1
    else:
        yearListClim = ee.List.sequence(yearStartClim,
                                        yearEndClim - 1)  #list inclusive
        num_years = yearEndClim - 1 - yearStartClim + 1

    #==============Not technically correct.. takes min over a single year.. not min over the time period
    # these are both resulting in 429 errors right now and 500 server errors too
    doy_filter = ee.Filter.calendarRange(doyStart, doyEnd, 'day_of_year')
    if statistic == 'Min':

        def min_climatology_func(year):
            """For each year, return an image of the minimum value over the DOY range"""
            year_filter = ee.Filter.calendarRange(year, year, 'year')
            return ee.Image(
                collection.filter(year_filter).filter(doy_filter).min())

        climatology_coll = ee.ImageCollection.fromImages(
            yearListClim.map(min_climatology_func))
        climatology_img = get_statistic(climatology_coll,
                                        'Mean',
                                        logger=logger)
    #==============Not technically correct.. takes max over a single year.. not max over the time period
    # these are both resulting in 429 errors right now... too many requests. and 500 server errors too
    elif statistic == 'Max':

        def max_climatology_func(year):
            """For each year, return an image of the maximum value over the DOY range"""
            year_filter = ee.Filter.calendarRange(year, year, 'year')
            return ee.Image(
                collection.filter(year_filter).filter(doy_filter).max())

        climatology_coll = ee.ImageCollection.fromImages(
            yearListClim.map(max_climatology_func))
        climatology_img = get_statistic(climatology_coll,
                                        'Mean',
                                        logger=logger)
    #==============
    elif (statistic == 'Mean' or statistic == 'Total'
          or statistic == 'Median'):
        #FilterDate needs an extra day on the high end,Set yearEnd to Jan 1st of next year
        yearStartClimUTC = dt.datetime(yearStartClim, 1, 1)
        yearEndClimUTC = dt.datetime(yearEndClim + 1, 1, 1)
        #===================
        climatology_coll = collectionMethods.get_climatology_collection(
            product, variable)
        # Bug fix: the original compared yearEndClim against 1981 and
        # yearStartClim against 2010 (swapped), so product 'G' almost never
        # used its pre-calculated 1981-2010 climatology.
        if climatology_coll == 'None' or (
                product == 'G' and
            (yearStartClim != 1981
             or yearEndClim != 2010)):  #no pre-calculated climos
            climatology_coll_temp = 'None'
            climatology_coll = collection.filterDate(
                yearStartClimUTC, yearEndClimUTC).filter(doy_filter)
        else:  #yes pre-calculated climos
            climatology_coll_temp = 'Some'
            if doyStart <= doyEnd:
                # Date range is in same year
                climatology_coll = climatology_coll.filter(
                    ee.Filter.And(ee.Filter.gte('doy', doyStart),
                                  ee.Filter.lte('doy', doyEnd)))
            else:
                # Date range wraps around to next year
                climatology_coll = climatology_coll.filter(
                    ee.Filter.Or(ee.Filter.gte('doy', doyStart),
                                 ee.Filter.lte('doy', doyEnd)))
        #===================

        if not sub_year_flag:
            climatology_img = get_statistic(climatology_coll,
                                            statistic,
                                            logger=logger)
            # A raw (non-pre-calculated) 'Total' sums every year; divide to
            # get the per-year average total.
            if statistic == 'Total' and climatology_coll_temp == 'None':
                climatology_img = climatology_img.divide(num_years)
        else:
            climatology_img = get_statistic(climatology_coll,
                                            statistic,
                                            logger=logger)
    # NOTE(review): if `statistic` is none of Min/Max/Mean/Total/Median,
    # climatology_img is never bound and the return below raises NameError --
    # confirm callers always pass one of the handled statistics.

    climatologyNote = 'Average calculated from {0}-{1}'.format(
        str(yearStartClim), str(yearEndClim))

    return climatology_img, climatology_coll, climatologyNote, num_days
Пример #15
0
def get_images(template_values):
    """Build the requested EarthEngine map image and related products.

    Reads the user's request parameters (product, variable, model/scenario,
    date range, calculation, statistic, units, colorbar and mask settings)
    out of *template_values*, assembles the matching EarthEngine image
    collection, reduces it to a single image, and returns a copy of the
    template values augmented — depending on ``toolAction`` — with extra
    keys such as 'mapid'/'token', 'pointValue', 'downloadURL' and
    'climatologyNotes'.

    Args:
        template_values: dict of request parameters (values are strings,
            as received from the web form).

    Returns:
        dict: a shallow copy of *template_values* with result keys added.
    """
    #from forms import stateLat, stateLong

    #set up logger
    logger = loggerFunctions.set_logger('get_images_debug')
    # Shallow-copy the incoming dict so the caller's template_values
    # are never mutated by this function.
    TV = {}
    for key, val in template_values.iteritems():
        TV[key] = val
    product = TV['product']
    var = TV['variable']
    model = TV['model']
    scenario = TV['scenario']
    calculation = TV['calculation']
    toolAction = TV['toolAction']
    yearStartClim = TV['yearStartClim']
    yearEndClim = TV['yearEndClim']
    statistic = TV['statistic']
    units = TV['units']
    palette = TV['palette']
    minColorbar = template_values['minColorbar']
    maxColorbar = template_values['maxColorbar']
    colorbarType = template_values['colorbarType']
    colorbarmap = template_values['colorbarmap']
    colorbarTicks = template_values['colorbarTicks']
    scale = TV['scale']
    downloadFilename = TV['downloadFilename']
    mask = TV['mask']
    # Mask bounds are only present in the request when masking is active.
    if mask != 'none':
        maskMin = TV['maskMin']
        maskMax = TV['maskMax']

    # Build EarthEngine date objects from date strings and explicitly set GMT
    # Note, by default EarthEngine date objects are already GMT
    if product in ['MACA', 'NASANEX']:
        # Climate-projection products: dates are derived from the
        # historical and future climatology year/month/day form fields
        # rather than from dateStart/dateEnd.
        monthList = [
            '', 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sept',
            'Oct', 'Nov', 'Dec'
        ]
        yearStartClimFut = TV['yearStartClimFut']
        yearEndClimFut = TV['yearEndClimFut']
        yearStartClim = TV['yearStartClim']
        yearEndClim = TV['yearEndClim']
        #these are for calculating the source title
        if calculation in [
                'anom', 'anompercentof', 'anompercentchange', 'percentile',
                'value'
        ]:
            dS = yearStartClimFut + '-' + TV['monthStartFut'] + '-' + TV[
                'dayStartFut']
            dE = yearEndClimFut + '-' + TV['monthEndFut'] + '-' + TV[
                'dayEndFut']
        else:
            dS = yearStartClim + '-' + TV['monthStartFut'] + '-' + TV[
                'dayStartFut']
            dE = yearEndClim + '-' + TV['monthEndFut'] + '-' + TV['dayEndFut']
        #just need to get the doyfilter right with these
        dSFut = yearStartClimFut + '-' + TV['monthStartFut'] + '-' + TV[
            'dayStartFut']
        dSHist = yearStartClim + '-' + TV['monthStartFut'] + '-' + TV[
            'dayStartFut']
        if int(TV['monthEndFut']) > int(TV['monthStartFut']):
            dEFut = yearStartClimFut + '-' + TV['monthEndFut'] + '-' + TV[
                'dayEndFut']
            dEHist = yearStartClim + '-' + TV['monthEndFut'] + '-' + TV[
                'dayEndFut']
        else:
            # End month does not follow start month within the same
            # calendar year, so the range wraps into the next year.
            dEFut = str(int(yearStartClimFut) +
                        1) + '-' + TV['monthEndFut'] + '-' + TV['dayEndFut']
            dEHist = str(int(yearStartClim) +
                         1) + '-' + TV['monthEndFut'] + '-' + TV['dayEndFut']
        dSUTC = ee.Date(dSHist, 'GMT')
        dEUTC = ee.Date(dEHist, 'GMT')

    else:
        dS = TV['dateStart']
        dE = TV['dateEnd']
        dSUTC = ee.Date(dS, 'GMT')
        dEUTC = ee.Date(dE, 'GMT')

    #==============
    #Initial Collection
    #==============
    frequency = 'daily'
    if product == 'MACA' or product == 'NASANEX':
        frequency = 'monthly'
    collection, coll_name, coll_desc, var_desc, notes = collectionMethods.get_collection(
        product, var, model, scenario, frequency, logger=logger)

    #==============
    #Calculation:Values,Climatology,Anomalies
    #==============
    #get_statistic returns ee.ImageCollection
    # NOTE(review): calc_img is only assigned in the two branches below;
    # if 'calculation' is ever outside both lists, modify_units() below
    # would raise UnboundLocalError — confirm upstream form validation
    # guarantees one of the handled values.
    if calculation in ['value']:
        if product in ['MACA', 'NASANEX']:
            # This is future average
            # FilterDate is exclusive on the high end, include an extra day on dEUTC
            collection = collection.filterMetadata('scenario', 'equals',
                                                   scenario)
            calc_img, clim_temp, clim_notes_temp, num_days = get_climatology(
                collection,
                product,
                var,
                dSFut,
                dEFut,
                statistic,
                calculation,
                yearStartClimFut,
                yearEndClimFut,
                logger=logger)
        elif product == 'CFSV2':  #convert 6hrly to daily
            collection = collection_CFSV2.convert_6hrly_to_daily(
                collection, var, dSUTC, dEUTC, logger)
            calc_img = get_statistic(collection, statistic, logger=logger)
        else:
            collection = collection.filterDate(dSUTC, dEUTC.advance(1, 'day'))
            calc_img = get_statistic(collection, statistic, logger=logger)
    elif calculation in [
            'anom', 'anompercentof', 'anompercentchange', 'clim', 'percentile',
            'zscore'
    ]:
        # All of these need a historical climatology image/collection first.
        if product in ['MACA', 'NASANEX']:  #this is historical average
            hist_coll = collection.filterMetadata('scenario', 'equals',
                                                  'historical')
            clim_img, clim_coll, climatologyNotes, num_days = get_climatology(
                hist_coll,
                product,
                var,
                dSHist,
                dEHist,
                statistic,
                calculation,
                yearStartClim,
                yearEndClim,
                logger=logger)
        else:
            clim_img, clim_coll, climatologyNotes, num_days = get_climatology(
                collection,
                product,
                var,
                dS,
                dE,
                statistic,
                calculation,
                yearStartClim,
                yearEndClim,
                logger=logger)

        if calculation in ['clim']:
            calc_img = clim_img
        else:
            # anomaly = future wrt historical
            if product in ['MACA', 'NASANEX']:
                # This is future average
                future_col = collection.filterMetadata('scenario', 'equals',
                                                       scenario)
                # NOTE(review): this call binds 'num_day' (unused) while the
                # get_anomaly() call below uses 'num_days' from the historical
                # climatology above — presumably intentional (normalize by the
                # historical day count), but confirm the variable name is not
                # a typo.
                calc_img, clim_temp, clim_notes_temp, num_day = get_climatology(
                    future_col,
                    product,
                    var,
                    dSFut,
                    dEFut,
                    statistic,
                    calculation,
                    yearStartClimFut,
                    yearEndClimFut,
                    logger=logger)
                calc_img = get_anomaly(calc_img, clim_img, clim_coll,
                                       calculation, statistic, num_days)
            else:
                collection = collection.filterDate(dSUTC,
                                                   dEUTC.advance(1, 'day'))
                calc_img = get_statistic(collection, statistic, logger=logger)
                calc_img = get_anomaly(calc_img, clim_img, clim_coll,
                                       calculation, statistic, 1)

        TV['climatologyNotes'] = climatologyNotes

    #==============
    #Units
    #==============
    calc_img = modify_units(calc_img, var, product, calculation, units)

    #==============
    #Extra template values
    #==============
    extra_template_values = {}

    #==============
    #Apply Mask
    #==============
    # Masking keeps only pixels inside/outside the user-supplied bounds;
    # masked pixels become transparent in the rendered map.
    if mask != 'none':
        maskMin = float(maskMin)
        maskMax = float(maskMax)
        if mask == 'above':
            calc_img = ee.Image(calc_img).mask(calc_img.lte(maskMax))
        elif mask == 'below':
            calc_img = ee.Image(calc_img).mask(calc_img.gte(maskMin))
        elif mask == 'exterior':
            calc_img = ee.Image(calc_img).mask(
                calc_img.lte(maskMin).Or(calc_img.gte(maskMax)))
        elif mask == 'interior':
            calc_img = ee.Image(calc_img).mask(
                calc_img.gte(maskMin).And(calc_img.lte(maskMax)))

    #==============
    #Get mapid
    #==============
    mapid = {'mapid': [], 'token': []}
    ## Limit the image values to the min and max before visualizing
    ##here calc_img = ee.Image()
    vis_image = calc_img.clamp(float(minColorbar), float(maxColorbar))

    #we could change this to just if var in ['TrueColor', 'FalseColor'] instead of all the products
    if (product in [
            'L_TOA', 'L5_TOA', 'L7_TOA', 'L8_TOA', 'L_SR', 'L5_SR', 'L7_SR',
            'L8_SR'
    ] and var in ['TrueColor', 'FalseColor']):
        ## Hard code display of multi-band Landsat images (for now)
        ## Intentionally not using "vis_image" here to avoid clamp (for now)
        mapid = map_image(calc_img, TV['opacity'], None, minColorbar,
                          maxColorbar)
        ## Bands can be set separately by passing a string of comma separate numbers
        ##mapid = map_image(calc_img, TV['opacity'], None, "0, 0, 0", "0.28, 0.28, 0.32")
    #elif ((colorbarmap == 'invUSDM' or colorbarmap == 'USDM') and calculation == 'percentile'):
    #    value_list = [3,6,10,20,30,40]
    #    mapid = map_image(apply_sld_colormap_styling_image(vis_image, palette, colorbarType, value_list))
    #elif ((colorbarmap == 'invUSDMwWet' or colorbarmap == 'USDMwWet') and
    #      calculation == 'percentile'):
    #    value_list = [3,6,10,20,30,70,80,90,94,97,100]
    #    mapid = map_image(apply_sld_colormap_styling_image(vis_image, palette, colorbarType, value_list))
    elif calculation == 'anompercentof':
        # First tick is dropped: SLD breakpoints are interval maxima.
        value_list = [int(i) for i in colorbarTicks.split(",")][1:]
        #value_list = [5,25,50,70,90,110,130,150,200,400,800]
        mapid = map_image(
            apply_sld_colormap_styling_image(vis_image, palette, colorbarType,
                                             value_list))
    elif colorbarType == 'continuous':
        mapid = map_image(vis_image, TV['opacity'], palette, minColorbar,
                          maxColorbar)
    elif colorbarType == 'discrete':
        palette_list = palette.split(',')
        ## Intentionally add 1 to x since value needs to be for the max of the interval
        value_list = [
            float(minColorbar) + float(x + 1) *
            (float(maxColorbar) - float(minColorbar)) / len(palette_list)
            for x in xrange(len(palette_list))
        ]
        mapid = map_image(
            apply_sld_colormap_styling_image(vis_image, palette, colorbarType,
                                             value_list))

    # Only publish map credentials when the map request fully succeeded.
    if mapid and mapid['mapid'] and mapid['token']:
        extra_template_values['mapid'] = mapid['mapid']
        extra_template_values['token'] = mapid['token']
        TV.update(extra_template_values)

    #==============
    #Get point value
    #==============
    if toolAction == 'showSingleValueOnMap':
        point_value = get_point_value(calc_img, float(TV['pointLat']),
                                      float(TV['pointLong']), var)
        extra_template_values['pointValue'] = '{0:0.4f}'.format(point_value)
        TV.update(extra_template_values)

    #==============
    #Region data extraction
    #==============
    if toolAction == 'downloadRectangleSubset' or toolAction == 'downloadFusionTableSubset':
        #the rect coordinates here have been set for either rect or bounding box of fusion table
        NELat = TV['NELat']
        NELong = TV['NELong']
        SWLat = TV['SWLat']
        SWLong = TV['SWLong']
        # Polygon ring (JSON-style string) in NE->SE->SW order starting NW.
        rectangle = '[[' + SWLong + ',' + NELat + '],' + \
                     '[' + NELong + ',' + NELat + '],' + \
                     '[' + NELong + ',' + SWLat + '],' + \
                     '[' + SWLong + ',' + SWLat + ']]'
        downloadMapFormat = TV['downloadMapFormat']
        projection = TV['downloadProjection']

        if toolAction == 'downloadFusionTableSubset':
            # Replace the rectangle with the fusion-table feature's bounds
            # and clip the image to that geometry.
            fusiontable = TV['fusiontabledownload']
            fusiontablename = TV['fusiontabledownloadname']

            region = ee.FeatureCollection('ft:' + fusiontable)
            if fusiontablename:
                region = region.filter(ee.Filter.eq('Name', fusiontablename))
            calc_img = calc_img.clip(region.geometry())
            rectangle = json.dumps(
                region.geometry().bounds().getInfo()['coordinates'])

        downloadOptions = {
            'name': downloadFilename,
            'scale': scale,
            'crs': projection,
            'region': rectangle,
            'maxPixels': 1e9,
            #'format':downloadMapFormat
        }

        #this is a way to get .tif to be non-blank
        #if downloadMapFormat=='png' or downloadMapFormat=='jpg' or downloadMapFormat=='tif':
        #    vis_image = calc_img.visualize(
        #        bands=var, min=float(minColorbar), max=float(maxColorbar),
        #        palette=palette.split(',')) #palette must be array of strings, not a string
        #    downloadURL = vis_img.getDownloadUrl(downloadOptions)
        #elif downloadMapFormat=='tif':
        downloadURL = calc_img.getDownloadUrl(downloadOptions)

        # getDownloadURL is the preferred "spelling" for this function
        # Switch at some point when everyone updates their earthengine-api
        #downloadURL = ???Export.Image(,'title',{'region': rectangle})
        extra_template_values['downloadURL'] = downloadURL
        TV.update(extra_template_values)

    #==============
    #Update template values
    #==============
    return TV
Пример #16
0
def set_initial_template_values(self, applicationName):
    """Assemble the initial template-variable dict for a page request.

    Merges application defaults, memcached max dates, request parameters
    (via ``get_variable``/``add_templateVariable``), form objects and
    per-application overrides into one dict, then runs each value through
    its optional ``forms.format_<key>`` formatter.

    Args:
        self: the request handler (provides ``self.request``).
        applicationName: name of the hosting application (e.g. 'fewsNet'),
            used to select defaults and per-app form overrides.

    Returns:
        dict: the fully formatted template values for rendering.
    """
    import loggerFunctions
    logger = loggerFunctions.set_logger('templatevariables')

    # Override saved maxDates with memcache values
    # NOTE(review): this binds (not copies) the module-level defaults dict
    # and then assigns into it, mutating shared state across requests —
    # confirm this in-place override is intended.
    maxDates = collection_dataStore.defaults_maxDates
    for key, value in maxDates.iteritems():
        try:
            data = memcache.get(key)
            if data is not None:
                maxDates[key] = data
        except:
            pass

    timeSeriesCalc_default, subDomainTypeTS_default, ftChoice1_default, variable_default, productType_default, product_default, statistic_default, calculation_default, units_default, varUnits_default, colorbar_default, colorbarMin_default, colorbarMax_default, colorbarSize_default, colorbarTicks_default, timeperiod_default, timeperiod_days, scale_default, mapCenterLongLat_default, pointLat_default, pointLon_default, mapCenterLongLat, mapzoom_default, opacity_default, NELat_default, NELong_default, SWLat_default, SWLong_default, minYear_default, minDate_default, runningMeanYears_default, runningMeanDays_default, basemap_default, CHIRPSvariable_default = application_defaults.get_applicationDefaults(
        self, applicationName)

    #Set month/day/Start/End defaults according to dateStart/End
    #Set tempend
    variable = get_variable(self, 'variable', variable_default)
    product = get_variable(self, 'product', product_default)
    if variable in collection_dataStore.names_notdefault:
        variablename = variable
    else:
        variablename = 'default'
    # tempend: latest available date ('YYYY-MM-DD') for this
    # product/variable, as maintained in the maxDates cache.
    tempend = maxDates[collection_dataStore.names_memcache[product]
                       [variablename]]
    #Set tempstart
    # tempstart: tempend minus the application's default time period.
    tempstart = (datetime.datetime(int(tempend[0:4]), int(tempend[5:7]),
                                   int(tempend[8:10])) -
                 datetime.timedelta(days=timeperiod_days)).strftime('%Y-%m-%d')
    #Set Month/Day defaults
    monthStart_default = tempstart[5:7].lstrip('0')
    dayStart_default = tempstart[8:10].lstrip('0')
    monthEnd_default = tempend[5:7].lstrip('0')
    dayEnd_default = tempend[8:10].lstrip('0')

    toolAction = get_variable(self, 'toolAction', 'getMap')
    if (toolAction == 'getTimeSeriesOverDateRange'):  #timeseries
        # Time-series requests use the *TS-suffixed form fields.
        variable = get_variable(self, 'variableTS', variable_default)
        variable2display = get_variable(self, 'variable2display', 'none')
        productType = get_variable(self, 'productTypeTS', productType_default)
        product = get_variable(self, 'productTS', product_default)
        statistic = get_variable(self, 'statisticsTS', statistic_default)
        calculation = get_variable(self, 'calculation', calculation_default)
        units = get_variable(self, 'unitsTS', units_default)
        varUnits = get_variable(self, 'varUnitsTS', varUnits_default)
        timeperiod = get_variable(self, 'timeperiodTS', timeperiod_default)
        if variable2display != 'none':  #2 variables
            productType2 = get_variable(self, 'productType2TS',
                                        productType_default)
            product2 = get_variable(self, 'product2TS', product_default)
            variable2 = get_variable(self, 'variable2TS', variable_default)
            statistic2 = get_variable(self, 'statistic2TS', statistic_default)
            varUnits2 = get_variable(self, 'var2UnitsTS', varUnits_default)
            timeperiod2 = get_variable(self, 'timeperiod2TS',
                                       timeperiod_default)
        else:  #1 variable
            # Mirror the first variable's settings into the second slot.
            productType2 = productType
            product2 = product
            variable2 = variable
            statistic2 = statistic
            varUnits2 = varUnits
            timeperiod2 = timeperiod
    else:  #mapping
        variable = get_variable(self, 'variable', variable_default)
        productType = get_variable(self, 'productType', productType_default)
        product = get_variable(self, 'product', product_default)
        statistic = get_variable(self, 'statistic', statistic_default)
        calculation = get_variable(self, 'calculation', calculation_default)
        units = get_variable(self, 'units', units_default)
        varUnits = get_variable(self, 'varUnits', varUnits_default)
        timeperiod = get_variable(self, 'timeperiod', timeperiod_default)
        productType2 = productType
        product2 = product
        variable2 = variable
        statistic2 = statistic
        varUnits2 = varUnits
        timeperiod2 = timeperiod

    template_values = {
        'applicationName': applicationName,
        'form_error': {},
        #Sharelink breaks if get_all is used since lists can not be embedded in urls
        'layer': self.request.get_all('layer', [])
    }
    list_baseoptions = {
        'mask': 'none',
        'maskMin': '',
        'maskMax': '',
        'downloadURL': '',
        'downloadFilename': 'climateEngine_download',
        'downloadregion': 'noRegion',
        'downloadProjection': 'EPSG:4326',
        'mapid': '',
        'token': '',
        'dispEnv': '',
        #Variable Options
        'toolAction': 'getMap',
        'variable': variable,
        'variableTS': variable,
        'variable2TS': variable2,
        'variable2display': 'none',
        'productType': productType,
        'productTypeTS': productType,
        'productType2TS': productType2,
        'product': product,
        'productTS': product,
        'product2TS': product2,
        'statistic': statistic,
        'statisticTS': statistic,
        'statistic2TS': statistic2,
        'calculation': calculation,
        'units': units,
        'unitsTS': units,
        'varUnits': varUnits,
        'varUnitsTS': varUnits,
        'var2UnitsTS': varUnits2,
        'timeperiod': timeperiod,
        #Map Options
        'opacity': opacity_default,
        'mapCenterLongLat': mapCenterLongLat_default,
        'mapzoom': mapzoom_default,
        #Get Map Options
        'kmlurl': '',
        'kmlurl2': '',
        'kmloption': '',
        'scale': scale_default,
        'NELat': NELat_default,
        'NELong': NELong_default,
        'SWLat': SWLat_default,
        'SWLong': SWLong_default,
        #Colorbar Options
        'palette': '',
        'minColorbar': colorbarMin_default,
        'maxColorbar': colorbarMax_default,
        'colorbarmap': colorbar_default,
        'colorbarsize': colorbarSize_default,
        'colorbarLabel':
        '',  #technically this is dependent on variable(should ditch)
        'colorbarType': 'continuous',
        'colorbarTicks': colorbarTicks_default,
        #TimeSeries Options
        'timeSeriesCalc': timeSeriesCalc_default,
        'chartType': '',
        'subDomainTypeTS': subDomainTypeTS_default,
        'basemap': basemap_default
    }
    add_templateVariable(self, list_baseoptions, template_values)
    #############################################
    ##     FEWSNET OPTIONS                     ##
    #############################################
    if applicationName == 'fewsNet' or applicationName == 'precisionGrazing' or applicationName == 'gddTool':
        # NOTE(review): the trailing comma makes this a 1-tuple
        # (CHIRPSvariable_default,) rather than the bare default —
        # likely a typo; confirm what the template expects.
        template_values['CHIRPSvariable'] = CHIRPSvariable_default,
    #############################################
    ##      MACA OPTIONS                     ##
    #############################################
    list_MACA = {
        'model': 'inmcm4',
        'scenario': 'historical',
        'modelTS': 'inmcm4',
        'scenarioTS': 'historical',
        'model2TS': 'inmcm4',
        'scenario2TS': 'historical'
    }
    add_templateVariable(self, list_MACA, template_values)

    #############################################
    ##      TEMPLATE TIME OPTIONS              ##
    #############################################
    # NOTE(review): this loop rebinds 'product' and leaves
    # yearStartClim/yearEndClim/yearStartClimFut/yearEndClimFut with the
    # values from the LAST iteration ('product2'); only yearTarget/
    # yearTarget2 are captured per-iteration — confirm this is intended.
    for p in ['product', 'product2']:
        product = self.request.get(p, product_default)
        yearStartClim = collection_dataStore.defaults_yearClim[product][
            'yearStartClim']
        yT = collection_dataStore.defaults_yearClim[product]['yearTarget']
        yearEndClim = collection_dataStore.defaults_yearClim[product][
            'yearEndClim']
        if yearEndClim == 'default':
            yearEndClim = tempend[0:4]
        yearStartClimFut = collection_dataStore.defaults_yearClim['MACA'][
            'yearStartClimFut']
        yearEndClimFut = collection_dataStore.defaults_yearClim['MACA'][
            'yearEndClimFut']
        if p == 'product':
            yearTarget = yT
        if p == 'product2':
            yearTarget2 = yT

    mon_names = [
        'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct',
        'Nov', 'Dec'
    ]

    maxYear_default = tempend[0:4]
    maxDate_default = tempend

    list_timeoptions = {
        'minYear': minYear_default,
        'minDate': minDate_default,
        'maxDate': maxDate_default,
        'maxYear': maxYear_default,
        'dateStart': tempstart,
        'dateEnd': maxDate_default,
        'dateStartTS': tempstart,
        'dateEndTS': maxDate_default,
        'yearTargetData': yearTarget,
        'yearTargetFigure': yearTarget,
        'yearTargetForm': yearTarget,
        'yearStart': yearStartClim,
        'yearEnd': yearEndClim,
        'monthStart': monthStart_default,
        'monthEnd': monthEnd_default,
        'dayStart': dayStart_default,
        'dayEnd': dayEnd_default,
        'season': 'custom',
        'monthStartFut': monthStart_default,
        'monthEndFut': monthEnd_default,
        'dayStartFut': dayStart_default,
        'dayEndFut': dayEnd_default,
        'seasonFut': 'custom',
        'minYear2': minYear_default,
        'minDate2': minDate_default,
        'maxDate2': maxDate_default,
        'maxYear2': maxYear_default,
        'dateStart2TS': tempstart,
        'dateEnd2TS': maxDate_default,
        'yearTarget2Data': yearTarget2,
        'yearStart2': yearStartClim,
        'yearEnd2': yearEndClim,
        'monthStart2': monthStart_default,
        'monthEnd2': monthEnd_default,
        'dayStart2': dayStart_default,
        'dayEnd2': dayEnd_default,
        'season2': 'custom',
        'runningMeanYears': runningMeanYears_default,
        'runningMeanDays': runningMeanDays_default,
        'timeperiodTS': timeperiod,
        'timeperiod2TS': timeperiod2,
        'yearStartClim': yearStartClim,
        'yearEndClim': yearEndClim,
        'yearStartClimFut': yearStartClimFut,
        'yearEndClimFut': yearEndClimFut
    }
    add_templateVariable(self, list_timeoptions, template_values)

    #add maxValue form elements
    for key, value in collection_dataStore.maxDates_lookup.iteritems():
        add_templateVariable(self, {value: maxDates[key]}, template_values)

    # Build human-readable season labels, e.g. 'Jan 01' ... 'Dec 31'.
    d = ' %02d' % int(template_values['dayStart'])
    template_values['seasonStart'] = mon_names[
        int(template_values['monthStart']) - 1] + d
    d = ' %02d' % int(template_values['dayEnd'])
    template_values['seasonEnd'] = mon_names[int(template_values['monthEnd']) -
                                             1] + d
    d = ' %02d' % int(template_values['dayStart2'])
    template_values['seasonStart2'] = mon_names[
        int(template_values['monthStart2']) - 1] + d
    d = ' %02d' % int(template_values['dayEnd2'])
    template_values['seasonEnd2'] = mon_names[int(template_values['monthEnd2'])
                                              - 1] + d

    #if Time Series of Single Year Daily, override start/end dates if needed
    if template_values['timeSeriesCalc'] == 'intraannual':
        s_dt = datetime.datetime.strptime(template_values['dateStart'],
                                          '%Y-%m-%d')
        e_dt = datetime.datetime.strptime(template_values['dateEnd'],
                                          '%Y-%m-%d')
        # Clamp ranges longer than one year to [target year, target year + 1).
        if (e_dt - s_dt).days > 366:
            y = template_values['yearTargetData']
            m = template_values['monthStart']
            d = template_values['dayStart']
            template_values['dateStartTS'] = y + '-' + m + '-' + d
            template_values['dateEndTS'] = str(int(y) + 1) + '-' + m + '-' + d
            template_values['dateStart'] = template_values['dateStartTS']
            template_values['dateEnd'] = template_values['dateEndTS']
            s_dt = datetime.datetime.strptime(template_values['dateStart2TS'],
                                              '%Y-%m-%d')
            e_dt = datetime.datetime.strptime(template_values['dateEnd2TS'],
                                              '%Y-%m-%d')
        # NOTE(review): this second check reuses s_dt/e_dt, which are only
        # rebound to the *2TS dates inside the branch above; if the first
        # range was <= 366 days this still compares the first range —
        # confirm the indentation/flow is intended.
        if (e_dt - s_dt).days > 366:
            y = template_values['yearTarget2Data']
            m = template_values['monthStart2']
            d = template_values['dayStart2']
            template_values['dateStart2'] = y + '-' + m + '-' + d
            template_values['dateEnd2TS'] = str(int(y) + 1) + '-' + m + '-' + d

    #############################################
    ##      TEMPLATE FUSION TABLE MAP DOWNLOAD       ##
    #############################################
    list_fusiontablemapdownload = {
        'fusiontabledownload': '1fRY18cjsHzDgGiJiS2nnpUU3v9JPDc2HNaR7Xk8',
        'fusiontabledownloadname': 'California',
        'pointLat': pointLat_default,
        'pointLong': pointLon_default,
        'pointValue': '',
        'downloadMapFormat': 'geotiff',
    }
    add_templateVariable(self, list_fusiontablemapdownload, template_values)

    #############################################
    ##      TEMPLATE POINTS       ##
    #############################################
    pointsLongLat_default = mapCenterLongLat
    marker_colors = [
        'blue', 'green', 'orange', 'purple', 'yellow', 'pink', 'red'
    ]
    mapCenterLongLat = template_values['mapCenterLongLat']
    template_values['marker_colors'] = marker_colors
    add_templateVariable(self, {'pointsLongLat': pointsLongLat_default},
                         template_values)
    # Only the first point/shape row is visible by default; the rest are
    # hidden until the user adds them.
    for s_idx in range(1, max_pointsShapes + 1):
        add_templateVariable(self, {'p' + str(s_idx) + 'check': 'checked'},
                             template_values)
        add_templateVariable(self, {'p' + str(s_idx) + 'altname': ''},
                             template_values)
        add_templateVariable(self, {'p' + str(s_idx): mapCenterLongLat},
                             template_values)
        if s_idx == 1:
            add_templateVariable(self, {'p' + str(s_idx) + 'display': 'block'},
                                 template_values)
        else:
            add_templateVariable(self, {'p' + str(s_idx) + 'display': 'none'},
                                 template_values)

    #############################################
    ##      TEMPLATE SHAPES-FUSION TABLE FOR TIME SERIES       ##
    #############################################
    template_values['shape_colors'] = [
        '#0000FF', '#00FF00', '#FFA500', '#551A8B', '#FFFF00', '#FF69B4',
        '#FF0000'
    ]
    for s_idx in range(1, max_pointsShapes + 1):
        add_templateVariable(self, {'ft' + str(s_idx): ''}, template_values)
        add_templateVariable(self, {'ft' + str(s_idx) + 'columnName': ''},
                             template_values)
        add_templateVariable(self, {'ft' + str(s_idx) + 'altname': ''},
                             template_values)
        add_templateVariable(self, {'ftSubChoice' + str(s_idx): ''},
                             template_values)
        add_templateVariable(self, {'polygon' + str(s_idx): ''},
                             template_values)
        if s_idx == 1:
            add_templateVariable(self,
                                 {'ft' + str(s_idx) + 'display': 'block'},
                                 template_values)
            add_templateVariable(self,
                                 {'ft' + str(s_idx) + 'check': 'checked'},
                                 template_values)
            add_templateVariable(self,
                                 {'ftChoice' + str(s_idx): ftChoice1_default},
                                 template_values)
        else:
            add_templateVariable(self, {'ft' + str(s_idx) + 'display': 'none'},
                                 template_values)
            add_templateVariable(self, {'ft' + str(s_idx) + 'check': ''},
                                 template_values)
            add_templateVariable(self, {'ftChoice' + str(s_idx): ''},
                                 template_values)

    #############################################
    ##      TEMPLATE FORMS       ##
    #############################################
    template_forms = {
        #Forms
        'formMask': forms.formMask,
        'formVariable2Display': forms.formVariable2Display,
        'formTimePeriods': forms.formTimePeriods,
        'formHighChartLayers': forms.formHighChartLayers,
        'formHighChartLayersIntraannual': forms.formHighChartLayersIntraannual,
        'formDownloadMapFormat': forms.formDownloadMapFormat,
        'formDownloadProjection': forms.formDownloadProjection,
        'formSeasons': forms.formSeasons,
        'formChartType': forms.formChartType,
        'formMonth': forms.formMonth,
        'formDay': forms.formDay,
        'formMapZoom': forms.formMapZoom,
        'formPaletteCustomMap': forms.formPaletteCustomMap,
        'formPaletteDivMap': forms.formPaletteDivMap,
        'formPaletteSeqMap': forms.formPaletteSeqMap,
        'formPaletteSize': forms.formPaletteSize,
        'formColorbarType': forms.formColorbarType,
        'formOpacity': forms.formOpacity,
        'formUnits': forms.formUnits,
        'formTimeSeriesCalc': forms.formTimeSeriesCalc,
        'formSubDomainTypeTS': forms.formSubDomainTypeTS,
        'formDownloadRegion': forms.formDownloadRegion,
        'formFusionTableChoices': forms.formFusionTableChoices,
        'formProductType': forms.formProductType,
        'formProductType1': forms.formProductType1,
        'formLayers': forms.formLayers,
        #'formLayers1':                   forms.formLayers1,
        'formBoolean': forms.formBoolean
    }
    template_values.update(template_forms)

    #############################################
    ##      EXTRA FEWS                 ##
    #############################################
    # Per-application form overrides (replace the generic forms above).
    if applicationName == 'fewsNet':
        extra_FEWS = {
            'formLayers': formsFEWSNET.formFEWSNETLayers,
            'formFusionTableChoices':
            formsFEWSNET.formFusionTableChoicesFEWSNET,
            'formCHIRPSChoicesFEWSNET': formsFEWSNET.formCHIRPSChoicesFEWSNET,
            'formMODISChoicesFEWSNET': formsFEWSNET.formMODISChoicesFEWSNET,
            'formLandsatChoicesFEWSNET':
            formsFEWSNET.formLandsatChoicesFEWSNET,
        }
        template_values.update(extra_FEWS)
    elif applicationName == 'precisionGrazing' or applicationName == 'gddTool':
        extra_FEWS = {
            'formLayers': formsPG.formPGLayers,
            'formFusionTableChoices': formsPG.formFusionTableChoicesPG,
            'formCHIRPSChoicesFEWSNET': formsFEWSNET.formCHIRPSChoicesFEWSNET,
            'formMODISChoicesFEWSNET': formsFEWSNET.formMODISChoicesFEWSNET,
            'formLandsatChoicesFEWSNET':
            formsFEWSNET.formLandsatChoicesFEWSNET,
            'formSubDomainTypeTS': formsPG.formSubDomainTypeTS
        }
        template_values.update(extra_FEWS)

    #############################################
    ##      SHARE LINK                         ##
    #############################################
    #Sharelink depends on most template variables
    template_values['logger'] = logger
    template_values['shareLink'] = set_share_link(template_values,
                                                  applicationName)
    #############################################
    ##      FORMAT TEMPLATE VARIABLES          ##
    #############################################
    #format template values to allow for different date formats etc...
    #See format_ functions in formchecks.py
    # Each key may have a matching 'format_<key>' function in the forms
    # module; if present it transforms the value, otherwise the value is
    # passed through unchanged.
    formatted_template_values = {}
    for key, val in template_values.iteritems():
        format_function_name = 'format_' + key
        try:
            format_function = getattr(forms, format_function_name)
        except:
            format_function = None

        if format_function:
            formatted_template_values[key] = format_function(val)
        else:
            formatted_template_values[key] = val
    return formatted_template_values