Example 1
def updateTurfModelFiles(factory, threat_key, replace_prev, verbose, debug):
    threat_fullname = factory.threatName(threat_key)

    if threat_key == 'hstress': periods = ('daily', )
    else: periods = ('daily', 'average')
    num_periods = len(periods)

    MODEL_START_TIME = datetime.datetime.now()

    common_obs_end = factory.commonObsEnd(threat_key, TODAY)
    max_obs_end = maxReanalysisEndTime(factory, threat_key)
    while max_obs_end > common_obs_end:  # most common case
        max_obs_end -= HOURS_IN_DAY
    max_obs_start = max_obs_end - HOURS_TO_START
    max_obs_date = max_obs_start.date()

    if debug:
        print '\nupdateTurfModelFiles debug info >>'
        print '     commonObsEnd :', common_obs_end
        print '      max_obs_end :', max_obs_end
        print '    max_obs_start :', max_obs_start
        print '     max_obs_date :', max_obs_date

    max_fcast_time = maxForecastEndTime(factory, threat_key)
    common_fcast_end = factory.commonFcastEnd(threat_key, max_fcast_time)

    if debug:
        print '   max_fcast_time :', max_fcast_time
        print '   commonFcastEnd :', common_fcast_end

    print '\nProcessing %s risk :' % threat_fullname

    period_count = 0
    for period_key in periods:
        PERIOD_START_TIME = datetime.datetime.now()

        key_dates = updateTurfModelPeriod(threat_key, period_key, max_obs_date,
                                          common_fcast_end, max_fcast_time,
                                          verbose, debug)
        # period_start, fcast_start, period_end = key_dates

        elapsed_time = elapsedTime(PERIOD_START_TIME, True)
        print '    Completed %s %s grid file update in %s' % (
            threat_fullname, period_key.title(), elapsed_time)
        if verbose:
            print '        data start = %s : fcast start = %s : data end = %s' % key_dates

        period_count += 1
        if period_count < num_periods: print ' '

    elapsed_time = elapsedTime(MODEL_START_TIME, True)
    print '\nCompleted %s grid updates in %s' % (threat_fullname, elapsed_time)
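These scripts time themselves with an elapsedTime(start, True) helper that is not part of the listings. A minimal sketch of what such a helper might look like, with the signature assumed from the call sites:

import datetime

def elapsedTime(start_time, as_string=False):
    # assumed helper: wall-clock time elapsed since start_time
    delta = datetime.datetime.now() - start_time
    if as_string:
        # format as H:MM:SS for the log messages used throughout
        hours, remainder = divmod(int(delta.total_seconds()), 3600)
        minutes, seconds = divmod(remainder, 60)
        return '%d:%02d:%02d' % (hours, minutes, seconds)
    return delta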
    def completeProcess(self, retcode):
        elapsed_time = elapsedTime(self.process_start_time, True)
        if retcode > 0:
            print FAILURE_MSG % (elapsed_time, self.active_script, retcode)
        else:
            print COMPLETE_MSG % (elapsed_time, self.active_script)
        sys.stdout.flush()
        self.active_script = None
Example 3
    def __call__(self, request):
        start_response = datetime.datetime.now()

        # decode request variables into a dictionary
        request_dict = self.requestAsDict(request)
        print '\n\nGrapeHardinessTempextHandler.__call__'

        # extract location coordinates
        location = self.extractLocationParameters(request_dict)
        lat, lon = location['coords']
        print '    location :\n', location

        target_year = request_dict.get('season', None)
        print '\n    target_year :', target_year

        # get the configured season limits
        dates = self.extractSeasonDates(request_dict)
        print '    dates :\n', dates

        season_start = asDatetimeDate(dates['season_start'])
        season_end = asDatetimeDate(dates['season_end'])
        target_year = dates['season']
        del dates['season']
        # initialize response string with season dates
        response = \
            '{"tempexts":{%s,"data":{' % self.tightJsonString(dates)[1:-1]

        reader = \
            self.tempextFileReader(target_year, self.source, self.region)
        if self.mode in ('dev', 'test'):
            print 'tempexts file :', reader.filepath
        # add recent averages
        data = \
        reader.getSliceAtNode('mint', season_start, season_end, lon, lat)
        response = \
            '%s"mint":%s' % (response, self.serializeData(data, '%.1f'))

        # add climate normal averages
        data = \
        reader.getSliceAtNode('maxt', season_start, season_end, lon, lat)
        response = \
            '%s,"maxt":%s' % (response, self.serializeData(data, '%.1f'))

        reader.close()
        del data
        if self.mode in ('dev', 'test'):
            print 'tempexts data retrieved in', elapsedTime(
                start_response, True)
        print '\n\nTEMPEXTS RESPONSE\n', response, '\n\n'

        self.respondWithJSON(request, '%s}}}' % response)
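The handler above splices self.tightJsonString(...) output directly into its response string; that helper is not shown in these listings. A minimal stand-alone sketch, assuming it simply produces compact JSON:

import json

def tightJsonString(value):
    # assumed helper: JSON with no whitespace between items, so callers
    # can strip the outer braces with [1:-1] and splice the rest inline
    return json.dumps(value, separators=(',', ':'))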
Example 4
    date_args = tuple([int(n) for n in args[0].split('.')])
    if utc_date:  # input date is already UTC corrected
        end_hour = datetime.datetime(*date_args)
    else:
        hour = tz.localize(datetime.datetime(*date_args))
        end_hour = hour.astimezone(pytz.utc)
else:
    hour = tz.localize(datetime.datetime.now())
    end_hour = hour.astimezone(pytz.utc)
if debug:
    print 'num_hours', num_hours
    print 'end_hour', end_hour

utc_hour = end_hour - datetime.timedelta(hours=num_hours - 1)
while utc_hour <= end_hour:
    if debug: print '\nprocessing download for', utc_hour
    files = downloadRTMA(utc_hour, server, verbose, debug)
    file_count += len(files)
    utc_hour += ONE_HOUR

elapsed_time = elapsedTime(download_start, True)
print '\ncompleted download of %d files in %s' % (file_count, elapsed_time)

transport_dirpath = \
    '/Volumes/Transport/data/app_data/shared/reanalysis/conus/rtma'
if file_count > 0 and os.path.exists(transport_dirpath):
    command = \
        '/usr/bin/rsync -cgloprtuD %s %s' % (RTMA_DIRPATH, transport_dirpath)
    print '\n', command
    os.system(command)
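The time stepping in Examples 1 and 4 (and the ONE_DAY steps in later examples) relies on timedelta constants defined elsewhere in these scripts. A plausible sketch of those definitions (HOURS_TO_START depends on project configuration and is left out):

import datetime

# assumed constants used by the time-stepping loops
ONE_HOUR = datetime.timedelta(hours=1)
ONE_DAY = datetime.timedelta(days=1)
HOURS_IN_DAY = datetime.timedelta(hours=24)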
Example 5
                  default=False)
parser.add_option('-n',
                  action='store_true',
                  dest='use_ndfd_cache',
                  default=False)
parser.add_option('-v', action='store_true', dest='verbose', default=False)
parser.add_option('-z', action='store_true', dest='debug', default=False)

options, args = parser.parse_args()

# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #

debug = options.debug
use_dev_paths = options.use_dev_paths
use_ndfd_cache = options.use_ndfd_cache
verbose = options.verbose or debug

factory = NDFDProjectFactory()
if use_dev_paths: factory.config.dirpaths.update(factory.config.dev_dirpaths)
if use_ndfd_cache: factory.setServerUrl(factory.ndfd_config.cache_server)

target_date, filepaths = factory.downloadLatestForecast(True)
if verbose:
    print 'NDFD grib files downloaded for %s :' % str(target_date)
    for filepath in filepaths:
        print '    ', filepath

elapsed_time = elapsedTime(DOWNLOAD_START_TIME, True)
fmt = 'completed download for %s in %s'
print fmt % (target_date.isoformat(), elapsed_time)
Example 6
#factory = TurfThreatGridFileFactory(source, region, sub_region)
factory = TurfThreatJsonGeneratorFactory()
if dev_mode: factory.useDirpathsForMode('dev')

grand_total = 0

# generate JSON files for threats that have both daily and average risk data
generator = factory.riskFileGenerator('avg,daily', source, region, sub_region)

for threat in ('anthrac', 'bpatch', 'dspot', 'pblight'):
    threat_fullname = factory.threatName(threat)
    JSON_START_TIME = datetime.datetime.now()
    count = generator(threat, target_year, debug)

    if count > 0:
        elapsed_time = elapsedTime(JSON_START_TIME, True)
        info = (threat_fullname, count, elapsed_time)
        print 'Generated JSON files for %s @ %d grid nodes in %s' % info
        grand_total += count

    else:
        print 'No valid data for %s yet.' % threat_fullname

# generate JSON files for threats that have only daily risk data
threat_fullname = factory.threatName('hstress')
generator = factory.riskFileGenerator('daily', source, region, sub_region)

JSON_START_TIME = datetime.datetime.now()
count = generator('hstress', 'daily', target_year, debug)

if count > 0:
    if debug: print '    building hardtemp dataset'
    builder.config.datasets.hardtemp.chunks = chunks
    data = reader.dateSlice('hardiness.temp',
                            season_start,
                            season_end,
                            dtype=float,
                            missing=N.nan,
                            units='F')

    builder.open('a')
    builder.buildDataset('hardtemp',
                         data=data,
                         start_date=season_start,
                         end_date=season_end)
    builder.close()
    attrs = reader.dateAttributes('hardiness.temp')
    attrs['start_date'] = season_start_str
    attrs['end_date'] = season_end_str
    attrs['units'] = 'F'
    if debug: print '    setting hardtemp attributes :\n    ', attrs
    builder.open('a')
    builder.setDatasetAttributes('hardtemp', **attrs)
    builder.close()

# turn annoying numpy warnings back on
warnings.resetwarnings()

msg = '... finished building files for temp extremes and %d grape varieties in %s'
print msg % (len(project.varieties), elapsedTime(build_start, True))
        if len(N.where(N.isnan(daily))[0]) == 0:
            lat = N.round(lats[y, x], 3)
            lon = N.round(lons[y, x], 3)
            params = {'lat': lat, 'lon': lon}
            params['daily'] = ','.join(['%d' % value for value in daily])

            filename = json_filename % {
                'node': factory.gridNodeToFilename((lon, lat)),
            }
            filepath = os.path.join(json_dirpath, filename)

            with open(filepath, 'w') as writer:
                writer.write(json_template % params)

            num_files += 1
            if verbose: print num_files, filepath
        else:
            num_nans = len(N.where(N.isnan(daily))[0])
            location = (y, x, lons[y, x], lats[y, x])
            if num_nans == daily.size:
                print 'All NAN daily @ node[%s,%d] (%.5f, %.5f)' % location
            else:
                print '%d NANs in daily @ node[%s,%d] (%.5f, %.5f)' % (
                    (num_nans, ) + location)

threat_fullname = factory.threatName(threat)
elapsed_time = elapsedTime(JSON_START_TIME, True)
info = (threat_fullname, num_files, elapsed_time)
print '\nGenerated %s JSON files for %d grid nodes in %s' % info
from optparse import OptionParser
parser = OptionParser()

parser.add_option('-v', action='store_true', dest='verbose', default=False)
parser.add_option('-z', action='store_true', dest='debug', default=False)

options, args = parser.parse_args()

# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #

debug = options.debug
verbose = options.verbose
region_key = args[0]

factory = AcisProjectFactory()
bbox = factory.config.regions[region_key].data
source = factory.getSourceConfig('ndfd')

builder = factory.getStaticFileBuilder('ndfd', region_key)
builder.initFileAttributes()

data = factory.getAcisGridData(source_key, 'mint', date, None, False,
                               meta=('ll','elev'), bbox=bbox, debug=debug)
print builder.filepath

builder.build(True, True, data['lon'], data['lat'], elev_data=data['elev'],
              bbox=bbox)

elapsed_time = elapsedTime(BUILD_START_TIME, True)
print 'completed build of %s static file in' % source.tag, elapsed_time
Example 10
                 drawScatterMap(risk, lats, lons, **map_options)
        mapped = True
    else:
        index = reader._dateToIndex('risk', date)
        print 'No data for %s (index=%d)' % (date.strftime('%Y-%m-%d'), index)
        mapped = False

    if mapped:
        finishMap(fig, axes, fig1, **options)
        if verbose: print '    ', map_filepath
        else: print '    ', map_filename

        thumb_filename = thumbfile_template.replace('||DATE||', file_date_str)
        thumb_filepath = os.path.join(thumb_dirpath, thumb_filename)
        image = PIL.Image.open(map_filepath)
        image.thumbnail(thumbnail_shape, PIL.Image.ANTIALIAS)
        image.save(thumb_filepath, 'PNG', quality=100, optimize=True)
        if debug: print '    ', thumb_filepath

    date += ONE_DAY
    days += 1

# turn annoying numpy warnings back on
warnings.resetwarnings()

info = (days, start_date, end_date)
print '\nProcessed maps for %d days : %s thru %s' % info
elapsed_time = elapsedTime(MAPS_START_TIME, True)
info = (threat_fullname, elapsed_time)
print 'Completed %s risk maps in %s' % info
Example 11
    def __call__(self, request):
        start_response = datetime.datetime.now()

        # decode request variables into a dictionary
        request_dict = self.requestAsDict(request)

        # extract location coordinates
        location = request_dict['location']
        coords = location.get('coords', None)
        if coords is not None:
            lat = float(coords[0])
            lon = float(coords[1])
        else:
            lat = float(location['lat'])
            lon = float(location['lon'])

        # GDD threshold and target_year
        gdd_threshold = str(request_dict['gdd_threshold'])
        target_year = request_dict.get('season', None)
        #if target_year is None: target_year = datetime.date.today().year
        if target_year is None: target_year = self.maxAvailableYear()

        # get the configured season limits
        dates = self.extractSeasonDates(request_dict, target_year)

        # initialize the response
        response_json = \
            '{"season":{"gdd_threshold":"%s"' % gdd_threshold
        response_json = \
            '%s,"location":%s' % (response_json,self.tightJsonString(location))

        # create a POR file reader
        reader = \
        self.getTargetYearFileReader(target_year, self.source, self.region)
        if self.mode in ('dev', 'test'):
            print 'season data file :', reader.filepath

        # create path to GDD dataset
        dataset_path = reader.gddDatasetPath(gdd_threshold)

        # capture the significant dates for the dataset
        if 'dates' in self.mode_config \
        and target_year == self.mode_config.season:
            dates.update(self.mode_config.dates.attrs)
        else:
            dates.update(reader.getSignificantDates(dataset_path))
        season_end = dates['season_end']
        end_date = asDatetimeDate(season_end)
        start_date = asDatetimeDate(dates['season_start'])
        if 'fcast_start' in dates:
            fcast_start = asDatetimeDate(dates['fcast_start'])
            if fcast_start > end_date: del dates['fcast_start']
        if 'fcast_end' in dates:
            fcast_end = asDatetimeDate(dates['fcast_end'])
            if fcast_end > end_date:
                if 'fcast_start' in dates: dates['fcast_end'] = season_end
                else: del dates['fcast_end']
        last_obs = asDatetimeDate(dates['last_obs'])
        if last_obs > end_date: dates['last_obs'] = season_end
        last_valid = asDatetimeDate(dates['last_valid'])
        if last_valid > end_date:
            dates['last_valid'] = season_end
            last_valid = asDatetimeDate(season_end)

        # temporarily free up the POR file
        reader.close()

        # add season dates to response
        response_json = \
            '%s,"dates":%s' % (response_json, self.tightJsonString(dates))
        del dates

        # get the accumulated GDD for Y axis of data plots
        reader.open()
        response_json = '%s,"data":{"season":%s}}}' % (
            response_json,
            self.serializeData(
                reader.getDataAtNode(dataset_path, lon, lat, start_date,
                                     last_valid)))
        reader.close()
        if self.mode in ('dev', 'test'):
            print 'season data retrieved in ', elapsedTime(
                start_response, True)

        # send the response
        self.respond(request, response_json)
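This handler, and the ones in Examples 14 and 18, compare season dates with asDatetimeDate(...), which is imported from elsewhere. A minimal sketch, assuming it coerces 'YYYY-MM-DD' strings (or existing date objects) to datetime.date:

import datetime

def asDatetimeDate(value):
    # assumed helper: normalize date-like values to datetime.date
    if isinstance(value, datetime.datetime):
        return value.date()
    if isinstance(value, datetime.date):
        return value
    return datetime.datetime.strptime(value, '%Y-%m-%d').date()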
Example 12
        print "\nstatic file for %s region already exists, do you want to replace it ?" % region
        response = raw_input("Enter 'yes' or 'no' : ")
        if response in ('y', 'yes'):
            os.remove(ndfd_filepath)
            print ndfd_filepath, 'exists :', os.path.exists(ndfd_filepath)
            break
        elif response in ('n', 'no'):
            print 'Execution ending, will not replace', ndfd_filepath
            exit()

builder = NdfdStaticGridFileBuilder(conus_filepath,
                                    ndfd_filepath,
                                    CONFIG,
                                    region,
                                    tolerance=tolerance,
                                    debug=debug)

with warnings.catch_warnings():
    print '\nBuilding Acis datasets'
    builder.buildAcisDatasets(debug=debug)

    print '\nBuilding NDFD group datasets'
    builder.buildNdfdGroup(debug=debug)

builder.close()

elapsed_time = elapsedTime(START_TIME, True)
fmt = 'completed creation of NDFD static file for %s in %s'
print fmt % (region, elapsed_time)
print 'filepath :', builder.filepath
                                       bbox=bbox,
                                       debug=debug)
        builder.open('a')
        builder.updateTempGroup(span_start, data['mint'], data['maxt'],
                                source.tag)
        builder.close()

        # increment to next sequence of dates
        if INCREMENT is not None:
            span_start = span_end + ONE_DAY
            span_end = span_start + INCREMENT
        else:
            span_start = span_start + ONE_DAY

    del data
    elapsed_time = elapsedTime(year_start_time, True)
    print PERFORMANCE_MSG % (target_year, elapsed_time)

    if wait_time > 0 and num_years > 1:
        time.sleep(wait_time)
        total_wait_time += wait_time

# turn annoying numpy warnings back on
warnings.resetwarnings()

elapsed_time = elapsedTime(BUILD_START_TIME, True)
print 'completed build for %d years in %s' % (len(target_years), elapsed_time)
if total_wait_time > 0:
    msg = 'total wait time between %d years = %s seconds'
    print msg % (num_years, total_wait_time)
Example 14
    def __call__(self, request):
        print '\n\nGrapeHardinessTempDataHandler.__call__'
        start_response = datetime.datetime.now()

        # decode request variables into a dictionary
        request_dict = self.requestAsDict(request)

        varieties = self.extractVarietyParameters(request_dict)
        variety = varieties['variety']

        # extract location coordinates
        location = self.extractLocationParameters(request_dict)
        if 'coords' in location:
            lat, lon = location['coords']
        else:
            lat = location['lat']
            lon = location['lon']

        # extract season date limits from request
        dates = self.extractSeasonDates(request_dict)
        target_year = dates['season']
        print '    target_year :', target_year
        print '    dates :\n', dates

        # create a variety file reader
        reader = self.varietyFileReader(variety, target_year, self.source,
                                        self.region, 'season')
        if self.mode in ('dev', 'test'):
            print 'hardtemp data file :', reader.filepath
        # path to the hardiness temp dataset
        dataset_path = 'hardtemp'

        # capture the significant dates for the dataset
        if 'dates' in self.mode_config \
        and target_year == self.mode_config.season:
            dates.update(self.mode_config.dates.attrs)
        else:
            dates.update(reader.significantDates(dataset_path))
        # temporarily free up the file
        reader.close()

        season_end = dates['season_end']
        end_date = asDatetimeDate(season_end)
        start_date = asDatetimeDate(dates['season_start'])
        if 'fcast_start' in dates:
            fcast_start = asDatetimeDate(dates['fcast_start'])
            if fcast_start > end_date: del dates['fcast_start']
        if 'fcast_end' in dates:
            fcast_end = asDatetimeDate(dates['fcast_end'])
            if fcast_end > end_date:
                if 'fcast_start' in dates: dates['fcast_end'] = season_end
                else: del dates['fcast_end']
        last_obs = asDatetimeDate(dates['last_obs'])
        if last_obs > end_date: dates['last_obs'] = season_end
        last_valid = asDatetimeDate(dates['last_valid'])
        if last_valid > end_date:
            dates['last_valid'] = season_end
            last_valid = asDatetimeDate(season_end)
        #dates = self.tightJsonString(dates)

        # get the hardiness temperature data for the Y axis of data plots
        reader.open()
        data = \
            reader.dataAtNode(dataset_path, lon, lat, start_date, last_valid)
        reader.close()
        if self.mode in ('dev', 'test'):
            print 'hardtemp data retrieved in ', elapsedTime(
                start_response, True)

        # initialize the response
        response_dict = {
            "hardtemp": {
                "variety": variety,
                "location": location,
                "dates": dates,
                "data": "data_array"
            }
        }
        response = self.tightJsonString(response_dict).replace('\\"', '"')
        response = \
            response.replace('"data_array"', self.serializeData(data, '%.1f'))

        # send the response
        self.respondWithJSON(request, response)
        ndfd_lats[i, j] = grib_lats[y, x]
        ndfd_lons[i, j] = neg_lons[y, x]

        if extreme_debug:
            print '\n\nmin_diff :', min_diff
            print 'nodes :', nodes
            print 'x_indexes :', x_indexes[i, j]
            print 'y_indexes :', y_indexes[i, j]
            print ' distance :', distance[i, j]
            print ' ndfd_lat :', ndfd_lats[i, j]
            print ' acis_lat :', acis_lats[i, j]
            print ' ndfd_lon :', ndfd_lons[i, j]
            print ' acis_lon :', acis_lons[i, j]
            if j > 5: exit()

elapsed_time = elapsedTime(COORD_MAPPING_START, True)
fmt = 'finished mapping NDFD grid nodes to ACIS grid in %s'
print fmt % elapsed_time

print '\nbuilding static file for %s region' % region
static_reader.open()
elevation = static_reader.get2DSlice('elev', bbox[0], bbox[2], bbox[1],
                                     bbox[3])
static_reader.close()
#filepath = static_factory.staticGridFilepath('ndfd', region)
#print "building", filepath

builder = ndfd_static_factory.ndfdStaticFileBuilder(region, dataset_shape)

with warnings.catch_warnings():
    builder.build(False, False, None, None, bbox=bbox, debug=debug)
Example 16
        thumb_filename = thumbfile_template.replace('||DATE||', file_date_str)
        thumb_filepath = os.path.join(thumb_dirpath, thumb_filename)
        image = PIL.Image.open(map_filepath)
        image.thumbnail(thumbnail_shape, PIL.Image.ANTIALIAS)
        if debug: print '    ', thumb_filepath
        image.save(thumb_filepath, 'PNG', quality=100, optimize=True)

        date += ONE_DAY
        maps += 1

    # turn annoying numpy warnings back on
    warnings.resetwarnings()

    info = (maps, start_date, end_date)
    print '\nProcessed maps for %d days : %s thru %s' % info
    elapsed_time = elapsedTime(MAPS_START_TIME, True)
    info = (treatment.description, threat.fullname, elapsed_time)
    print 'Completed %s %s control maps in %s' % info

    return maps


# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
#
# MAIN PROGRAM
#
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #

# factory for accessing data for maps
data_factory = TurfControlsFactory()
if dev_mode: data_factory.useDirpathsForMode('dev')

    # turn annoying numpy warnings back on
    warnings.resetwarnings()

    if len(missing_times) > 0:
        for missing in missing_times:
            reason, hour, filepath = missing
            info = (timeString(hour), reason, filepath.split('conus/')[1])
            print '\nData missing for %s : %s\n    %s' % info
            success, message = grid_factory.repairMissingReanalysis(hour, grid_variable, grid_region)
            print '    %s' % message
            if success: total_hours += 1


    elapsed_time = elapsedTime(VARIABLE_START_TIME, True)
    msg = 'completed update of %d hours of "%s" data in %s'
    print msg % (total_hours, grid_variable, elapsed_time)

if update_rhum:

    if update_start_time > update_end_time:
        print 'RHUM grid file is already at latest available analysis time : %s' % update_end_time.strftime('%Y-%m-%d:%H')
        
    else: # update RHUM from TMP and DPT

        # filter annoying numpy warnings
        warnings.filterwarnings('ignore',"All-NaN axis encountered")
        warnings.filterwarnings('ignore',"All-NaN slice encountered")
        warnings.filterwarnings('ignore',"invalid value encountered in greater")
        warnings.filterwarnings('ignore',"invalid value encountered in less")
Example 18
    def __call__(self, request):
        start_response = datetime.datetime.now()

        # decode request variables into a dictionary
        request_dict = self.requestAsDict(request)

        # extract location coordinates
        location = request_dict['location']
        coords = location.get('coords', None)
        if coords is not None:
            lat = float(coords[0])
            lon = float(coords[1])
        else:
            lat = float(location['lat'])
            lon = float(location['lon'])

        # GDD threshold and target_year
        gdd_threshold = request_dict['gdd_threshold']
        target_year = request_dict.get('season', None)

        # get the configured season limits
        dates = self.extractSeasonDates(request_dict, target_year)
        season_start = asDatetimeDate(dates['season_start'])
        season_end = asDatetimeDate(dates['season_end'])
        target_year = dates['season']
        del dates['season']
        # initialize response string with season dates
        response = \
            '{"history":{%s,"data":{' % self.tightJsonString(dates)[1:-1]

        reader = self.getHistoryFileReader(target_year, self.source,
                                           self.region, gdd_threshold)
        if self.mode in ('dev', 'test'):
            print 'history file :', reader.filepath
        # add recent averages
        data = \
        reader.getSliceAtNode('recent', season_start, season_end, lon, lat)
        response = '%s"recent":%s' % (response, self.serializeData(data))

        # add climate normal averages
        data = \
        reader.getSliceAtNode('normal', season_start, season_end, lon, lat)
        response = '%s,"normal":%s' % (response, self.serializeData(data))

        # add period of record averages
        data = \
        reader.getSliceAtNode('por.avg', season_start, season_end, lon, lat)
        response = '%s,"poravg":%s' % (response, self.serializeData(data, 4))

        # add period of record - percent difference max GDD to average GDD
        data = \
        reader.getSliceAtNode('por.max', season_start, season_end, lon, lat)
        response = '%s,"pormax":%s' % (response, self.serializeData(data, 4))

        # add period of record - percent difference min GDD to average GDD
        data = \
        reader.getSliceAtNode('por.min', season_start, season_end, lon, lat)
        response = '%s,"pormin":%s' % (response, self.serializeData(data, 4))

        reader.close()
        del data
        if self.mode in ('dev', 'test'):
            print 'history data retrieved in', elapsedTime(
                start_response, True)

        self.respond(request, '%s}}}' % response)
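The response strings in this handler (and in Examples 3 and 11) are assembled around self.serializeData(...), which is not included here. A minimal sketch, assuming it renders a 1D numpy slice as a JSON array with NaNs written as null and with a default precision chosen only for illustration:

import numpy as N

def serializeData(data, precision='%.3f'):
    # assumed helper: 1D array -> JSON array string, NaN -> null
    # precision may be a format string ('%.1f') or a digit count (4)
    if isinstance(precision, int): fmt = '%%.%df' % precision
    else: fmt = precision
    items = ['null' if N.isnan(value) else fmt % value for value in data]
    return '[%s]' % ','.join(items)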
Example 19
    row_y = []
    for col in range(num_cols):
        lat = lats[row, col]
        lon = lons[row, col]
        y, x, ndfdlon, ndfdlat, dist = \
            nearestNode(lon, lat, ndfd_lons, ndfd_lats, search_radius, debug)
        row_dist.append(dist)
        row_lat.append(ndfdlat)
        row_lon.append(ndfdlon)
        row_x.append(x)
        row_y.append(y)
        if debug: print row, col, y, x, lat, ndfdlat, lon, ndfdlon, dist
        if csv_filepath:
            line = fmt % (row, col, y, x, lat, ndfdlat, lon, ndfdlon, dist)
            csv_file.write(line)
    if verbose: print status % (row, elapsedTime(index_start_time, True))

    distance.append(row_dist)
    ndfd_x.append(row_x)
    ndfd_y.append(row_y)
    region_lats.append(row_lat)
    region_lons.append(row_lon)

elapsed_time = elapsedTime(index_start_time, True)
print 'completed indexing %d rows in %s' % (num_rows, elapsed_time)

del lats, lons, ndfd_lats, ndfd_lons

if csv_filepath:
    csv_file.close()
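The row/column indexing loop above depends on a nearestNode(...) helper defined elsewhere. A brute-force sketch of what it might do, using a simple lon/lat distance (the real version may use a KD-tree or great-circle distance):

import numpy as N

def nearestNode(lon, lat, grid_lons, grid_lats, search_radius, debug=False):
    # assumed helper: find the grid node closest to (lon, lat)
    diffs = N.sqrt((grid_lons - lon)**2 + (grid_lats - lat)**2)
    y, x = N.unravel_index(N.argmin(diffs), diffs.shape)
    dist = diffs[y, x]
    if debug and dist > search_radius:
        print 'nearest node is outside the search radius :', dist
    return y, x, grid_lons[y, x], grid_lats[y, x], dist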
        for utc_hour in download_hours:
            info = (reanalysis_name, utc_hour.strftime('%Y-%m-%d:%H'))
            print '\nProcessing %s DATA download for %s' % info
            grib_filepath = downloadData(factory, utc_hour, grib_server,
                                         verbose, debug)
            if grib_filepath is not None:
                data_count += 1
                if debug: print '    saved DATA grib file to :', grib_filepath
                updateDataGrids(factory, utc_hour, grib_filepath, debug)

        # turn annoying numpy warnings back on
        warnings.resetwarnings()

        if data_count > 0:
            info = (data_count, reanalysis.upper(),
                    elapsedTime(SECTION_START, True))
            print '\nCompleted download of %d %s DATA grib files in %s' % info

# download precip files
pcpn_count = 0
pcpn_files = []

if download_pcpn:
    if start_hour is None:
        download_hours = determineTimespan(factory, end_hour, 'PCPN',
                                           grib_region, max_backward, verbose,
                                           debug)
        if len(download_hours) == 0:
            info = (end_hour.strftime('%Y-%m-%d:%H UTC'), max_backward,
                    reanalysis.upper())
            print '\nSearched from %s back %d hours. No previous %s DATA grib files were found.' % info
                    grid_variable).items():
                if not time_key in skip:
                    print '    %s = %s' % (time_key,
                                           hour.strftime('%Y-%m-%d:%H'))
            manager.close()
            print ' '

    else:
        if slice_end > slice_start:
            print timespan_warning % (slice_start.strftime('%Y-%m-%d:%H'),
                                      slice_end.strftime('%Y-%m-%d:%H'))
        else:
            print hour_warning % slice_start.strftime('%Y-%m-%d:%H')

# turn annoying numpy warnings back on
warnings.resetwarnings()

if len(missing_times) > 0:
    for missing in missing_times:
        reason, hour, filepath = missing
        info = (timeString(hour), reason, filepath.split('conus/')[1])
        print '\nData missing for %s : %s\n    %s' % info
        success, message = \
            grid_factory.repairMissingReanalysis(hour, grid_variable, grid_region)
        print '    %s' % message
        if success: total_hours += 1

elapsed_time = elapsedTime(UPDATE_START_TIME, True)
msg = '\ncompleted update of %d hours of "%s" data in %s'
print msg % (total_hours, grid_variable, elapsed_time)
reader.close()
del reader

# create the average GDD arrays for each time span
for group in factory.config.filetypes.history50.groups:
    scope = group[1]['path']
    first_year, last_year = scopes[scope]
    span_start = datetime.datetime.now()

    msg = '\ngenerating %d (%d days) history of %s GDD extremes for %s scope'
    print msg % (target_year, days_in_history, coverage, scope)
    gdd_dict = buildGddArrays(factory, days_in_history, first_year, last_year,
                              source, region, gdd_threshold, coverage)

    # update the GDD datasets in the history file
    msg = '    updating %s dataset in %d %s GDD history file'
    manager = factory.getHistoryFileManager(target_year, source, region,
                                            gdd_threshold, coverage, 'a')
    for dataset_name, gdd_grid in gdd_dict.items():
        dataset_path = '%s.%s' % (scope, dataset_name)
        print msg % (dataset_path, target_year, gdd_threshold)
        manager.open('a')
        manager.updateDataset(dataset_path, 1, gdd_grid)
        manager.close()

    msg = '....finished %s GDD history in %s'
    print msg % (scope, elapsedTime(span_start, True))

# turn annoying numpy warnings back on
warnings.resetwarnings()
Example 23
# reprocess range of dates in the current year
elif num_args == 4:
    start_date = datetime.date(target_year, int(args[0]), int(args[1]))
    end_date = datetime.date(target_year, int(args[2]), int(args[3]))
# reprocess range of dates in a specific year
elif num_args == 5:
    start_date = datetime.date(target_year, int(args[1]), int(args[2]))
    end_date = datetime.date(target_year, int(args[3]), int(args[4]))

if end_date == start_date: end_date = None
else: end_date = min(end_date, last_valid_date)

# get the temperature datasets and close the reader
maxt = reader.getTimeSlice('temps.maxt', start_date, end_date)
mint = reader.getTimeSlice('temps.mint', start_date, end_date)
reader.close()
del reader

# get the appropriate GDD manager for the source
manager = factory.getPORFileManager(target_year, source, region, mode='a')
print 'recalculating gdd in', manager.filepath
# recalculate GDD and update the file
for threshold, th_string in gdd_thresholds:
    start_time = datetime.datetime.now()
    manager.open('a')
    manager.updateThresholdGroup(threshold, start_date, mint, maxt, source.tag,
                                 **kwargs)
    manager.close()
    elapsed_time = elapsedTime(start_time, True)
    print '    processed GDD %s in %s' % (th_string, elapsed_time)
                stages = N.zeros(num_days, dtype='<i2')
                for index, threshold in enumerate(treatment.thresholds[1:]):
                    where = N.where(node_data >= threshold)
                    if len(where[0]) > 0: stages[where] = index + 1
                control_json.append(
                    json_fmt % (name, ','.join(['%d' % s for s in stages])))

            params['data'] = '{%s}' % ','.join(control_json)

            with open(filepath, 'w') as writer:
                writer.write(json_template % params)

            num_files += 1
            if verbose: print num_files, filepath
        else:
            print 'ISSUE : All NAN node found at :', y, x, lons[y, x], lats[y, x]

    # turn annoying numpy warnings back on
    warnings.resetwarnings()

    total_files += num_files

    elapsed_time = elapsedTime(JSON_START_TIME, True)
    msg = 'Generated %d json files for %d treatments of %s in %s'
    print msg % (total_files, len(treatments), control.fullname, elapsed_time)

elapsed_time = elapsedTime(JOB_START_TIME, True)
msg = 'Generated a total of %d json files for %d threats in %s'
print msg % (total_files, num_threats, elapsed_time)