def getAcisGridData(self, elems, start_date, end_date=None, meta=None, bbox=None, grid=None, debug=False): if bbox is None: bbox = fromConfig('default.bbox.data') if grid is None: grid = fromConfig('default.acis_grid') query = '{"grid":"%d","bbox":"%s"' % (grid, bbox) _start_date = asAcisQueryDate(start_date) if end_date is not None: _end_date = asAcisQueryDate(end_date) query += ',"sdate":"%s","edate":"%s"' % (_start_date, _end_date) else: query += ',"date":"%s"' % _start_date if isinstance(elems, basestring): if ',' in elems: _elems = elems.split(',') else: _elems = elems if isinstance(_elems, (list,tuple)): _elems_ = [ ] for name in _elems: _elems_.append('{"name":"%s"}' % name) query += ',"elems":[%s]' % ','.join(_elems_) else: query += ',"elems":"%s"' % _elems if isinstance(meta, basestring): query += ',"meta":"%s"' % meta elif isinstance(meta, (list,tuple)): query += ',"meta":"%s"' % ','.join(meta) query += '}' if debug: print 'factory.getAcisGridData :\n', query client = AcisGridDataClient(debug=debug) # returns python dict { 'meta' = { "lat" : grid, 'lon' : grid } # 'data' = [ [date string, mint_grid] ] query_result = json.loads(client.query(query)[0]) return self.unpackQueryResults(query_result, _elems, meta)
def additionalFileAttributes(self, **kwargs):
    """Return file-level attributes describing this file's time span:
    target year, start/end dates (as ACIS query date strings) and the
    number of days covered."""
    return {
        'target_year': self.target_year,
        'end_date': asAcisQueryDate(self.end_date),
        'start_date': asAcisQueryDate(self.start_date),
        'num_days': self.num_days,
    }
def initTempDatasets(self, group, temp_attrs, **kwargs):
    """Create a group of empty temperature datasets plus a matching
    provenance dataset for each one.

    Arguments:
        group      : name of the group to create/populate
        temp_attrs : dict of dataset attributes; must contain 'missing'
        kwargs may override : chunks, compression, dtype, shape,
                              verbose, temp_datasets
    """
    default = self.default
    group_description = '%s Temperatures' % group.title()

    # extract date attributes from inputs
    # NOTE(review): end_str/start_str are computed but not used in this
    # body -- presumably leftovers; confirm before removing
    end_str = asAcisQueryDate(self.end_date)
    num_days = (self.end_date - self.start_date).days + 1
    start_str = asAcisQueryDate(self.start_date)

    # extract attributes used to create dataset
    # chunked one day (first axis) at a time over the full lat/lon grid
    chunks = kwargs.get('chunks', (1, ) + self.lons.shape)
    compression = kwargs.get('compression', 'gzip')
    dtype = kwargs.get('dtype', '<i2')
    shape = kwargs.get('shape', (num_days, ) + self.lons.shape)
    verbose = kwargs.get('verbose', False)

    # create the group if it does not already exist
    if group not in self._group_names:
        if verbose: print 'creating', group
        self.open('a')
        self.createGroup(group, description=group_description)
        self.close()

    # create the temperature datasets
    temp_datasets = kwargs.get('temp_datasets', default.temp_datasets)
    for key, name in temp_datasets:
        full_dataset_path = '%s.%s' % (group, key)
        # skip datasets that already exist
        if full_dataset_path in self._dataset_names: continue

        # create empty temperature dataset
        description = '%s %s temperature' % (name, group.title())
        if verbose:
            print '\nCreating empty %s dataset' % full_dataset_path
        self.open('a')
        self.createEmptyDataset( full_dataset_path, shape, dtype,
                                 temp_attrs['missing'],
                                 description=description, chunks=chunks,
                                 compression=compression, )
        self.setDatasetAttributes(full_dataset_path, **temp_attrs)
        self.close()

        # create empty provenance dataset alongside the data dataset
        full_dataset_path = '%s_provenance' % full_dataset_path
        description = 'Provenance for %s' % description.lower()
        self.open('a')
        self._createEmptyProvenance(full_dataset_path, 'temp',
                                    description, verbose)
        self.close()
def _resolveDateAttributes(self, dataset, **kwargs):
    """Resolve start/end date attributes as ACIS query date strings.

    kwargs may override the project dates; when no target year is set
    an empty dict is returned.
    """
    if not self.target_year:
        return { }
    start_date = kwargs.get('start_date',
                     self._projectStartDate(self.target_year, **dataset))
    end_date = kwargs.get('end_date',
                   self._projectEndDate(self.target_year, **dataset))
    return {
        'start_date': asAcisQueryDate(start_date),
        'end_date': asAcisQueryDate(end_date),
    }
def _resolveDateAttributes(self, dataset, **kwargs):
    """Convert any *_date keyword values to ACIS query date strings and,
    when a target year is set, fill in missing start/end dates from the
    project's date calculators."""
    time_attrs = dict((key, asAcisQueryDate(value))
                      for key, value in kwargs.items()
                      if key.endswith('_date'))
    if self.target_year:
        for key, projector in (('start_date', self._projectStartDate),
                               ('end_date', self._projectEndDate)):
            if key not in time_attrs:
                date = projector(self.target_year, **dataset)
                time_attrs[key] = asAcisQueryDate(date)
    return time_attrs
def _initModelGroup(self, model_name, verbose=False): group_name = self.modelGroupName(model_name) if group_name not in self._group_names: print 'creating', group_name, 'group' attributes = { 'chill_model': self.modelName(model_name), 'start_date': asAcisQueryDate(self.start_date), 'end_date': asAcisQueryDate(self.end_date), 'description': chillModelDescription(model_name), } self.open('a') self.createGroup(self.modelGroupName(model_name), **attributes) self.close() self._registerModel(model_name)
def _initFileAttributes(self, **kwargs):
    """Set the file-level attributes shared by all project files.

    kwargs may override : created (timestamp), data_bbox.
    Raises whatever self._validateKwargs raises on bad keyword args.
    """
    # validate before touching the file; the result was previously bound
    # to an unused local (_kwargs_), so only the call is kept
    self._validateKwargs(kwargs)
    end_str = asAcisQueryDate(self.end_date)
    start_str = asAcisQueryDate(self.start_date)
    created = kwargs.get('created', self.timestamp)
    bbox = kwargs.get('data_bbox', None)
    if bbox is None:
        bbox = fromConfig('default.bbox.data')
    self.open('a')
    self.setFileAttributes(created=created, target_year=self.target_year,
                           start_date=start_str, end_date=end_str,
                           data_bbox=bbox)
    self.close()
def mapFilename(date, variety, model_name, map_group, map_type,
                lo_gdd_th=None, hi_gdd_th=None, test_path=False):
    """Build the file name for an apple map product from the configured
    template for the variety (or map group when variety is None)."""
    date_str = date if isinstance(date, basestring) else asAcisQueryDate(date)
    params = {'model': nameToFilepath(model_name), 'date_str': date_str}

    if variety is None:
        template = fromConfig('crops.apple.filenames.maps.%s' % map_group)
    else:
        template = fromConfig('crops.apple.filenames.maps.variety')
        params['variety'] = nameToFilepath(varietyName(variety))

    params['map_type'] = 'GDD' if map_type == 'gdd' \
                               else nameToFilepath(map_type)
    if lo_gdd_th is not None:
        params['thresholds'] = gddThresholdName(lo_gdd_th, hi_gdd_th)
    return template % params
def dormancyProvenanceGenerator(date, timestamp, data):
    """Build a provenance record: date string, node count at each
    dormancy stage (1..num_stages), then the processing timestamp."""
    counts = [len(N.where(data == stage)[0])
              for stage in range(1, num_stages + 1)]
    return tuple([asAcisQueryDate(date)] + counts + [timestamp])
def webGraphicFilename(date, variety, graphic_key, ext, parse_keywords=True):
    """Build the file name for a grape web graphic from the configured
    variety template."""
    date_str = date if isinstance(date, basestring) else asAcisQueryDate(date)
    keywords = nameToFilepath(graphic_key) if parse_keywords else graphic_key
    params = {
        'variety': nameToFilepath(variety.description),
        'ext': ext,
        'date_str': date_str,
        'keywords': keywords,
    }
    template = fromConfig('crops.grape.filenames.web_graphic.variety')
    return template % params
def _emptyProvenanceRecords_(self, start_date, num_days, empty_prov_record):
    """Return num_days copies of the empty provenance record, each with
    its own date string substituted into the first field."""
    record_tail = empty_prov_record[1:]
    return [(asAcisQueryDate(start_date + relativedelta(days=offset)),)
            + record_tail
            for offset in range(num_days)]
def _stageProvenanceGenerator_(self, date, timestamp, stages):
    """Build a provenance record: observation date, node count at each
    phenological stage (0..len(self.stages)), then the timestamp."""
    counts = [len(N.where(stages == stage)[0])
              for stage in range(len(self.stages) + 1)]
    return tuple([asAcisQueryDate(date)] + counts + [timestamp])
def _generateEmptyDateProvenance(self, provenance, attrs):
    """Build one empty provenance record per day, from the dataset's
    start date (falling back to the file start date) through the file
    end date inclusive."""
    tail = provenance.empty[1:]
    day = asDatetimeDate(attrs.get('start_date', self.start_date))
    records = []
    while day <= self.end_date:
        records.append((asAcisQueryDate(day), ) + tail)
        day = day + ONE_DAY
    return records
def _createEmptyProvenance(self, dataset_path, prov_key, description, verbose=False): end_date = self.end_date end_str = asAcisQueryDate(end_date) start_date = self.start_date start_str = asAcisQueryDate(start_date) num_days = (end_date - start_date).days + 1 empty_record = self.provenance.empty[prov_key] formats = self.provenance.formats[prov_key] names = self.provenance.names[prov_key] if names[0] in ('date', 'obs_date'): record_tail = empty_record[1:] records = [] for day in range(num_days): date = start_date + relativedelta(days=day) record = (asAcisQueryDate(date), ) + record_tail records.append(record) else: records = [empty_record for day in range(num_days)] if verbose: print '\ncreating empty provenance for', prov_key print 'names', formats print 'formats', formats print 'empty record', empty_record print 'record 0', records[0] print 'record -1', records[-1], '\n' empty = N.rec.fromrecords(records, shape=(num_days, ), formats=formats, names=names) self.createDataset(dataset_path, empty, raw=True) self.setDatasetAttributes(dataset_path, start_date=start_str, end_date=end_str, description=description)
def _killProvenanceGenerator_(self, date, timestamp, levels):
    """Build a provenance record: observation date, count of nodes with
    no kill (level 0), count of nodes at each kill level, then the
    processing timestamp."""
    counts = [len(N.where(levels == kill)[0])
              for kill in [0] + list(self.kill_levels)]
    return tuple([asAcisQueryDate(date)] + counts + [timestamp])
def mapFilename(date, variety, map_key, map_type=None, test_path=False):
    """Build the file name for a grape variety map; map_key may be a
    dotted 'group.type' pair when map_type is not given."""
    date_str = date if isinstance(date, basestring) else asAcisQueryDate(date)

    if map_type is None and '.' in map_key:
        map_group, map_type = map_key.split('.')
    else:
        map_group = map_key

    if map_type == 'gdd':
        key_str = '%s-GDD' % nameToFilepath(map_group)
    else:
        key_str = '%s-%s' % (nameToFilepath(map_group),
                             nameToFilepath(map_type))

    params = {
        'variety': nameToFilepath(variety.description),
        'date_str': date_str,
        'map_key': key_str,
    }
    return fromConfig('crops.grape.filenames.maps.variety') % params
def mapFilepath(self, date, model, map_group, map_type, lo_gdd_th,
                hi_gdd_th, test_file=False):
    """Build the full path of a map file for the given date by joining
    the working directory with the map file name template."""
    dirpath = mapWorkingDir(self.target_year, self.variety.name,
                            model.name, map_group, map_type,
                            lo_gdd_th, hi_gdd_th, test_file)
    filename = mapFilename('%s', self.variety.name, model.name,
                           map_group, map_type, lo_gdd_th, hi_gdd_th,
                           test_file)
    template = os.sep.join((dirpath, filename))
    return template % asAcisQueryDate(date)
def webMapFilename(date, variety, model_name, map_group, map_type,
                   config_key='web_maps'):
    """Build the file name for an apple web map from the configured
    template for the variety (or map group when variety is None)."""
    date_str = date if isinstance(date, basestring) else asAcisQueryDate(date)
    params = {
        'model': nameToFilepath(model_name),
        'date_str': date_str,
        'map_type': nameToFilepath(map_type),
    }
    if variety is None:
        template = fromConfig('crops.apple.filenames.%s.%s'
                              % (config_key, map_group))
    else:
        template = fromConfig('crops.apple.filenames.%s.variety' % config_key)
        params['variety'] = nameToFilepath(varietyName(variety))
    return template % params
def _resolveDateAttributes(self, **kwargs):
    """Return start/end date attributes as ACIS query date strings,
    honoring keyword overrides; datetime overrides are converted."""
    attrs = {}
    for key, default in (('start_date', self.start_date),
                         ('end_date', self.end_date)):
        value = kwargs.get(key, asAcisQueryDate(default))
        if isinstance(value, datetime):
            value = asAcisQueryDate(value)
        attrs[key] = value
    return attrs
def plotKillVsStageAtPoint(self, lon, lat, model, start_date, end_date,
                           lo_gdd_th, hi_gdd_th, mint, test_file=False,
                           verbose=False):
    """Plot, for the grid node nearest (lon, lat), the per-stage kill
    temperature boundaries, the daily stage's kill temperature and the
    daily minimum temperature overlay -- one PNG per kill level.

    Arguments:
        lon, lat   : point of interest; nearest grid node is used
        model      : model object with .name and .description
        start_date, end_date : inclusive plot time span
        lo_gdd_th, hi_gdd_th : GDD thresholds used to locate the dataset
        mint       : minimum temperature series for the time span
                     (assumed aligned with the date span -- TODO confirm)
    """
    from dateutil.relativedelta import relativedelta
    from matplotlib import pyplot
    from matplotlib.ticker import Formatter

    # get the map directory path and the template for the map file name
    # '%%s' in the template survives the first substitution so the kill
    # percent can be inserted per plot below
    plot_dirpath = plotWorkingDir(self.target_year, self.variety.name,
                                  model.name, 'kill.at.stage', test_file)
    filename_template = '%s-Frost-Apple-%s-%%s-Kill-at-Stage-%s.png'
    filename_template = filename_template % (
        asAcisQueryDate(start_date), self.variety.name,
        model.name.title())
    filepath_template = plot_dirpath + os.sep + filename_template

    # get the map title template and initialize the map title
    title_template = '%s : %%s Kill at Stage\n%s\n\n%s\n%s'
    time_span = '%s thru %s' % (start_date.strftime('%B %d, %Y'),
                                end_date.strftime('%B %d, %Y'))
    title_template = title_template % (self.variety.description,
                                       model.description, time_span,
                                       '%-7.3fW , %-6.3fN' % (lon, lat))

    # get date independent attributes and grids
    y, x = self.indexOfClosestNode(lon, lat)
    start_indx = self.indexFromDate(start_date)
    end_indx = self.indexFromDate(end_date) + 1

    # turn start/end indexes into a list of day numbers (1-based)
    days = [day for day in range(1, (end_indx - start_indx) + 1)]

    # create a date formatter for the X axis : maps day number back to
    # a month/day label relative to start_date
    class DateFormatter(Formatter):
        def __init__(self, start_date):
            self.start_date = start_date
        def __call__(self, x, pos=0):
            if pos == 0: return ''
            date = self.start_date + relativedelta(days=(x - 1))
            return '%d/%d' % (date.month, date.day)
    dateFormatter = DateFormatter(start_date)

    # get stage at node for each day
    dataset = self.modelDatasetPath(model.name, lo_gdd_th, hi_gdd_th,
                                    'stage', 'index')
    stage_at_node = self.getDataset(dataset)[start_indx:end_indx, y, x]

    kill_levels = self.variety.kill_levels
    kill_temps = self.variety.kill_temps.attr_list
    # floor value so stage 0 plots below all real kill temperatures
    min_stage_temp = min(kill_temps[0][-1], N.nanmin(mint))
    stage_temps = [kills for kills in kill_temps]
    stage_temps.insert(0,
                       (min_stage_temp, min_stage_temp, min_stage_temp))

    var_config = fromConfig('crops.apple.variety')
    colors = var_config.maps.options.stage.colors
    stage_names = tuple(var_config.stage_name_map.attr_values)
    plot_options = var_config.plots.options.kill_at_stage
    mint_options = plot_options.mint.attrs
    stage_options = plot_options.stage.attrs

    # draw a plot for each kill level
    for indx, kill_level in enumerate(kill_levels):
        # initialize figure and GCA
        figure = pyplot.figure(figsize=(8, 6), dpi=100)
        axis = figure.gca()
        # set X axis date limits before we draw anything
        pyplot.xlim(days[0], days[-1])

        # draw kill boundary for this kill level at each stage
        for stage, stage_kill in enumerate(kill_temps, start=1):
            kill_temp = stage_kill[indx]
            axis.plot([days[0], days[-1]], [kill_temp, kill_temp],
                      c=colors[stage], label=stage_names[stage])

        # draw a line showing the kill temperature of the stage at each day
        stages = [stage_temps[stage][indx] for stage in stage_at_node]
        pyplot.plot(days, stages, **stage_options)

        # draw the mint overlay
        pyplot.plot(days, mint, **mint_options)

        # add X,Y axis labels, background grid and legend
        #axis.xaxis.set_major_locator = date_locator
        axis.xaxis.set_major_formatter(dateFormatter)
        #figure.autofmt_xdate()
        axis.set_ylabel('Temperature', fontsize=12)
        axis.grid(True)
        pyplot.legend(prop={'size': 6}, fancybox=True, framealpha=0.5)

        # draw the axes
        pyplot.axes(axis)

        # post title
        kill_percent = '%d%%' % kill_level
        title = title_template % kill_percent
        pyplot.suptitle(title, fontsize=12)

        # save to output file
        output_filepath = filepath_template % kill_percent
        figure.savefig(output_filepath)
        print 'plot saved to', output_filepath
'cmap': 'jet', 'colorbar': True, 'titleyoffset': 0.165, } kill_levels = variety.kill_levels kill_temps = variety.kill_temps.attr_list stage_names =\ tuple(fromConfig('crops.apple.variety.stage_name_map.attr_values')) for lo_gdd_th, hi_gdd_th in gdd_thresholds: date = start_date while date <= end_date: map_options['date'] = date date_str = asAcisQueryDate(date) mint = temp_manager.getTemp(mint_dataset, date) stage_grid = \ variety_manager.getStage(model.name, lo_gdd_th, hi_gdd_th, date) for stage, stage_name in enumerate(stage_names[1:], start=1): indexes = N.where(stage_grid == stage) if len(indexes[0]) > 0: for indx, kill_level in enumerate(kill_levels): percent = '%d%%' % kill_level map_options['title'] = title % (percent, stage_name) map_options['outputfile'] = \ filepath % (date_str, stage_name, percent) diff = float(kill_temps[indx]) - mint
def accumStatsProvenanceGenerator(date, timestamp, data_1, data_2):
    """Build a provenance record of summary statistics for two grids.

    Layout: date string, then min/max/mean/median of data_1, the same
    four statistics of data_2, and finally the processing timestamp.
    """
    record = [asAcisQueryDate(date)]
    for grid in (data_1, data_2):
        record.extend((N.nanmin(grid), N.nanmax(grid), N.nanmean(grid),
                       SS.nanmedian(grid, axis=None)))
    record.append(timestamp)
    return tuple(record)
axis.hist(hard_temp.flatten(), bins, normed=False, histtype='bar', rwidth=0.8) axis.set_ylabel('Number of Nodes', fontsize=10) label_units = chr(176) + 'F' axis.set_xlabel('Temperature %s' % label_units.decode('latin1'), fontsize=10) axis.grid(True) pyplot.suptitle(title, fontsize=14) pyplot.title(date.strftime('%B %d, %Y'), fontsize=12) output_filepath = filepath % asAcisQueryDate(date) figure.savefig(output_filepath) print 'completed', output_filepath # need this stop Matplotlib from keeping each plot in figure memory pyplot.close() date += ONE_DAY sys.stdout.flush() if animate: print 'creating animation' png_path = '*Hardiness-Temp-Histogram.png' template = '%d-Frost-Grape-%s-Hardiness-Temp-Histogram-animation.gif' anim_filename = template % (target_year, nameToFilepath(variety.name)) anim_path = os.path.join(plot_dirpath, anim_filename) os.chdir(plot_dirpath) os.system('convert -delay %d %s -loop 0 %s' % (delay, png_path, anim_path))
# get temp manager and lot, lon data manager = factory.getTempGridManager(target_year, 'r', test_file) lats = manager.lats lons = manager.lons map_options = { 'area':'northeast', 'titleyoffset': 0.165, #'apply_mask':False, 'cmap':'jet', 'colorbar':True, } temp = manager.getTemp(temp_path, start_date, end_date, units=units) if end_date is None: map_options['autobounds'] = True map_options['date'] = start_date map_options['title'] = title map_options['outputfile'] = map_filepath % asAcisQueryDate(start_date) drawFilledContours(temp, lats, lons, **map_options) else: map_options['contourbounds'] = plot_bound(temp, 20) num_days = (end_date - start_date).days + 1 for day in range(num_days): date = start_date + relativedelta(days=day) map_options['date'] = date map_options['outputfile'] = map_filepath % asAcisQueryDate(date) day_temps = temp[day] map_options['title'] = title % (N.nanmin(day_temps),N.nanmax(day_temps)) drawFilledContours(day_temps, lats, lons, **map_options)
manager = factory.getTemperatureManager(target_year, 'r', test_file) # extract the time span for the target year start_year = target_year - 1 start_month, day = fromConfig('default.start_day') target_start_date = datetime(start_year, start_month, day) month, day = fromConfig('default.end_day') target_end_date = datetime(target_year, month, day) # download several days at a time start_date = target_start_date end_date = start_date + days_per_loop while start_date <= target_end_date: if end_date > target_end_date: end_date = target_end_date print 'downloading', asAcisQueryDate(start_date), asAcisQueryDate(end_date) # download historical temperatures for the time span data = factory.getAcisGridData('mint,maxt', start_date, end_date, None, data_bbox, acis_grid, debug=debug) # update the file's temperature grid manager.open('a') manager.updateTemp('reported.maxt', data['maxt'], start_date) manager.updateTemp('reported.mint', data['mint'], start_date) manager.close()
def tempExtremesProvenanceGenerator(date, timestamp, mint, maxt, source):
    """Build a provenance record for one day's temperature extremes.

    Layout: date string, min/max/mean of mint, min/max/mean of maxt,
    the data source, then the processing timestamp.
    """
    stats = []
    for grid in (mint, maxt):
        stats.extend((N.nanmin(grid), N.nanmax(grid), N.nanmean(grid)))
    return (asAcisQueryDate(date),) + tuple(stats) + (source, timestamp)
dataset, None, None, test_file) filepath_template = map_dirpath + os.sep + filename_template # get the map title template and initialize the map title title_template = fromConfig('crops.apple.chill.maps.titles.accumulated') title = title_template % { 'model': model.description, } # get GDD map options map_options = fromConfig('crops.apple.chill.maps.options.accumulated.attrs') # get date indepenedent attributes and grids from the stage grid manager manager = factory.getChillGridManager(target_year, 'r', test_file) lats = manager.lats lons = manager.lons date = start_date while date < past_date: map_options['title'] = title # date-specific map options map_options['date'] = date map_options['outputfile'] = filepath_template % asAcisQueryDate(date) # get GDD accumulations for the date and draw the map chill_grid = manager.getChill(model.name, dataset, date) chill_grid[N.where(chill_grid < 0.0)] = N.nan drawFilledContours(chill_grid, lats, lons, **map_options) date += ONE_DAY
def dateAccumStatsProvenanceGenerator(date, timestamp, daily, accumulated):
    """Build a provenance record of summary statistics for a daily grid
    and its accumulated counterpart.

    Layout: date string, min/max/mean/median of the daily grid, the same
    four statistics of the accumulated grid, then the timestamp.
    """
    record = [asAcisQueryDate(date)]
    for grid in (daily, accumulated):
        record.extend((N.nanmin(grid), N.nanmax(grid), N.nanmean(grid),
                       nanmedian(grid, axis=None)))
    record.append(timestamp)
    return tuple(record)
def _initModelDatasets(self, group, datasets, group_description, verbose=False, **kwargs): """ Does the "heavy lifting" to initialize a group and it's datasets in a new file. Be careful, this method is primarily meant to be called by the group-sepcific methods above. WARNING : DO NOT USE IN AN FILE THAT ALREADY HAS THE GROUP OR IT'SDATASETS !!! IT WILL FAIL AND THE FILE MAY BECOME HOPELESSLY CORRUPTED. """ default = fromConfig('default') num_days = (self.end_date - self.start_date).days + 1 chunks = kwargs.get('chunks', (1,) + self.lons.shape) compression = kwargs.get('compression',default.compression) shape = kwargs.get('shape', (num_days,) + self.lons.shape) verbose = kwargs.get('verbose',False) # date attributes date_attrs = { 'start_date' : asAcisQueryDate(self.start_date), 'end_date' : asAcisQueryDate(self.end_date), } # create the group if group not in self._group_names: if verbose: print 'creating', group self.open('a') self.createGroup(group, description=group_description) self.setGroupAttributes(group, **date_attrs) self.close() # create datasets for dataset_name, ds_attrs in datasets: #ds_attrs = dict(ds_attrs) full_dataset_path = self.modelDatasetPath(group, dataset_name) if full_dataset_path not in self._dataset_names: if verbose: print 'creating', full_dataset_path ds_attrs.update(date_attrs) if 'node_spacing' not in ds_attrs: ds_attrs['node_spacing'] = '5 km' self.open('a') self.createEmptyDataset(full_dataset_path, shape, ds_attrs['dtype'], ds_attrs['missing'], chunks=chunks, compression=compression, description=ds_attrs['description']) del ds_attrs['dtype'], ds_attrs['description'] self.setDatasetAttributes(full_dataset_path, **ds_attrs) self.close() # create the chill provenance dataset prov_path = self.modelDatasetPath(group, 'provenance') if prov_path not in self._dataset_names: if verbose: print 'creating', prov_path prov_key = kwargs.get('prov_key', group) prov_description = kwargs.get('prov_description',None) if prov_description is None: 
prov_description = \ '%s processing provenance' % group_description.lower() self.open('a') self._createEmptyProvenance(prov_path, prov_key, prov_description, verbose) self.close()
def observedProvenanceGenerator(date, timestamp, data):
    """Build a provenance record: date string, min/max/mean/median of
    the observed grid, then the processing timestamp."""
    stats = (N.nanmin(data), N.nanmax(data), N.nanmean(data),
             nanmedian(data, axis=None))
    return (asAcisQueryDate(date),) + stats + (timestamp,)