def __init__(self, config_dict, name):
    """Extract one source's settings from config_dict[name].

    Raises:
        KeyError: If name is not a key of config_dict.
    """
    entry = config_dict[name]  # KeyError propagates by design
    self.enable = to_bool(entry.get('enable', False))
    self.hostname = entry.get('hostname', '')
    self.port = to_int(entry.get('port', 80))
    self.timeout = to_int(entry.get('timeout', 10))
def __init__(self, engine, config_dict):
    """Initialize the archive service.

    Reads the [StdArchive] section of config_dict (falling back to
    hard-coded defaults), reconciles the software archive interval with
    the station hardware's interval if the console reports one, then
    binds the event handlers this service needs.
    """
    super(StdArchive, self).__init__(engine, config_dict)
    # Extract the various options from the config file. If it's missing, fill in with defaults:
    if 'StdArchive' in config_dict:
        self.data_binding = config_dict['StdArchive'].get('data_binding', 'wx_binding')
        self.record_generation = config_dict['StdArchive'].get('record_generation', 'hardware').lower()
        self.archive_delay = to_int(config_dict['StdArchive'].get('archive_delay', 15))
        software_interval = to_int(config_dict['StdArchive'].get('archive_interval', 300))
        self.loop_hilo = to_bool(config_dict['StdArchive'].get('loop_hilo', True))
    else:
        self.data_binding = 'wx_binding'
        self.record_generation = 'hardware'
        self.archive_delay = 15
        software_interval = 300
        self.loop_hilo = True
    syslog.syslog(syslog.LOG_INFO, "engine: Archive will use data binding %s" % self.data_binding)
    syslog.syslog(syslog.LOG_INFO, "engine: Record generation will be attempted in '%s'"
                  % (self.record_generation,))
    # If the station supports a hardware archive interval, use that.
    # Warn if it is different than what is in config.
    ival_msg = ''
    try:
        # Accessing console.archive_interval raises NotImplementedError on
        # hardware that has no notion of an archive interval.
        if software_interval != self.engine.console.archive_interval:
            syslog.syslog(syslog.LOG_ERR,
                          "engine: The archive interval in the"
                          " configuration file (%d) does not match the"
                          " station hardware interval (%d)." %
                          (software_interval,
                           self.engine.console.archive_interval))
        self.archive_interval = self.engine.console.archive_interval
        ival_msg = "(specified by hardware)"
    except NotImplementedError:
        self.archive_interval = software_interval
        ival_msg = "(specified in weewx configuration)"
    syslog.syslog(syslog.LOG_INFO, "engine: Using archive interval of %d seconds %s"
                  % (self.archive_interval, ival_msg))
    # A non-positive delay would make the archive logic meaningless:
    if self.archive_delay <= 0:
        raise weewx.ViolatedPrecondition("Archive delay (%.1f) must be greater than zero."
                                         % (self.archive_delay,))
    # A delay of half the interval or more is suspicious; warn but proceed:
    if self.archive_delay >= self.archive_interval / 2:
        syslog.syslog(syslog.LOG_WARNING,
                      "engine: Archive delay (%d) is unusually long" %
                      (self.archive_delay,))
    syslog.syslog(syslog.LOG_DEBUG, "engine: Use LOOP data in hi/low calculations: %d" %
                  (self.loop_hilo,))
    self.setup_database(config_dict)
    # Subscribe to the engine events this service acts on:
    self.bind(weewx.STARTUP, self.startup)
    self.bind(weewx.PRE_LOOP, self.pre_loop)
    self.bind(weewx.POST_LOOP, self.post_loop)
    self.bind(weewx.CHECK_LOOP, self.check_loop)
    self.bind(weewx.NEW_LOOP_PACKET, self.new_loop_packet)
    self.bind(weewx.NEW_ARCHIVE_RECORD, self.new_archive_record)
def trend(self, time_delta=None, time_grace=None, data_binding=None):
    """Return a TrendObj bound to the given trend parameters.

    Parameters left as None fall back to the [trend] section of
    option_dict: time_delta default 10800 s, time_grace default 300 s.
    """
    delta = time_delta
    if delta is None:
        delta = to_int(self.option_dict['trend'].get('time_delta', 10800))
    grace = time_grace
    if grace is None:
        grace = to_int(self.option_dict['trend'].get('time_grace', 300))
    return TrendObj(delta, grace,
                    self.db_lookup, data_binding,
                    self.report_time,
                    self.formatter, self.converter,
                    **self.option_dict)
def __init__(self, engine, config_dict):
    """Initialize the archive service.

    Reads the [StdArchive] section of config_dict (falling back to
    hard-coded defaults), reconciles the software archive interval with
    the station hardware's interval if the console reports one, then
    binds the event handlers this service needs.
    """
    super(StdArchive, self).__init__(engine, config_dict)
    # Extract the various options from the config file. If it's missing, fill in with defaults:
    if 'StdArchive' in config_dict:
        self.data_binding = config_dict['StdArchive'].get('data_binding', 'wx_binding')
        self.record_generation = config_dict['StdArchive'].get('record_generation', 'hardware').lower()
        self.archive_delay = to_int(config_dict['StdArchive'].get('archive_delay', 15))
        software_interval = to_int(config_dict['StdArchive'].get('archive_interval', 300))
        self.loop_hilo = to_bool(config_dict['StdArchive'].get('loop_hilo', True))
    else:
        self.data_binding = 'wx_binding'
        self.record_generation = 'hardware'
        self.archive_delay = 15
        software_interval = 300
        self.loop_hilo = True
    syslog.syslog(syslog.LOG_INFO, "engine: Archive will use data binding %s" % self.data_binding)
    syslog.syslog(syslog.LOG_INFO, "engine: Record generation will be attempted in '%s'"
                  % (self.record_generation,))
    # If the station supports a hardware archive interval, use that.
    # Warn if it is different than what is in config.
    ival_msg = ''
    try:
        # Accessing console.archive_interval raises NotImplementedError on
        # hardware that has no notion of an archive interval.
        if software_interval != self.engine.console.archive_interval:
            syslog.syslog(syslog.LOG_ERR,
                          "engine: The archive interval in the"
                          " configuration file (%d) does not match the"
                          " station hardware interval (%d)." %
                          (software_interval,
                           self.engine.console.archive_interval))
        self.archive_interval = self.engine.console.archive_interval
        ival_msg = "(specified by hardware)"
    except NotImplementedError:
        self.archive_interval = software_interval
        ival_msg = "(specified in weewx configuration)"
    syslog.syslog(syslog.LOG_INFO, "engine: Using archive interval of %d seconds %s"
                  % (self.archive_interval, ival_msg))
    # A non-positive delay would make the archive logic meaningless:
    if self.archive_delay <= 0:
        raise weewx.ViolatedPrecondition("Archive delay (%.1f) must be greater than zero."
                                         % (self.archive_delay,))
    syslog.syslog(syslog.LOG_DEBUG, "engine: Use LOOP data in hi/low calculations: %d" %
                  (self.loop_hilo,))
    self.setup_database(config_dict)
    # Subscribe to the engine events this service acts on:
    self.bind(weewx.STARTUP, self.startup)
    self.bind(weewx.PRE_LOOP, self.pre_loop)
    self.bind(weewx.POST_LOOP, self.post_loop)
    self.bind(weewx.CHECK_LOOP, self.check_loop)
    self.bind(weewx.NEW_LOOP_PACKET, self.new_loop_packet)
    self.bind(weewx.NEW_ARCHIVE_RECORD, self.new_archive_record)
def __init__(self, config_dict, altitude_vt, latitude_f, longitude_f, db_binder=None): """Initialize the service.""" # Start with the default configuration. Make a copy --- we will be modifying it merge_dict = ConfigObj(StringIO(DEFAULTS_INI)) # Now merge in the overrides from the config file merge_dict.merge(config_dict) # Extract out the part we're interested in self.svc_dict = merge_dict['StdWXCalculate'] if db_binder is None: db_binder = weewx.manager.DBBinder(config_dict) self.db_manager = db_binder.get_manager(data_binding=self.svc_dict.get( 'data_binding', 'wx_binding'), initialize=True) self.ignore_zero_wind = to_bool( self.svc_dict.get('ignore_zero_wind', True)) # Instantiate a PressureCooker to calculate various kinds of pressure self.pressure_cooker = PressureCooker( altitude_vt, to_int(self.svc_dict.get('max_delta_12h', 1800)), self.svc_dict['Algorithms'].get('altimeter', 'aaASOS')) # Instantiate a RainRater to calculate rainRate self.rain_rater = RainRater( to_int(self.svc_dict.get('rain_period', 900)), to_int(self.svc_dict.get('retain_period', 930))) # Instantiate a WXXTypes object to calculate simple scalars (like dewpoint, etc.) self.wx_types = WXXTypes(self.svc_dict, altitude_vt, latitude_f, longitude_f) # Now add all our type extensions into the type system weewx.xtypes.xtypes.append(self.pressure_cooker) weewx.xtypes.xtypes.append(self.rain_rater) weewx.xtypes.xtypes.append(self.wx_types) # Report about which values will be calculated... log.info( "The following values will be calculated: %s", ', '.join([ "%s=%s" % (k, self.svc_dict['Calculations'][k]) for k in self.svc_dict['Calculations'] ])) # ...and which algorithms will be used. log.info( "The following algorithms will be used for calculations: %s", ', '.join([ "%s=%s" % (k, self.svc_dict['Algorithms'][k]) for k in self.svc_dict['Algorithms'] ]))
def __init__(self, database='', root='', **argv):
    """Initialize an instance of Connection.

    Args:
        database: Path to the sqlite file (required).
        root: An optional path to be prefixed to parameter 'database'.
            If not given, nothing will be prefixed.

    Keyword Args:
        timeout: Seconds to wait for a lock to be released. Default is 5.
        isolation_level: The sqlite isolation level. Default is None
            (whatever sqlite3.connect's default is).

    Raises:
        weedb.OperationalError: If the file does not exist, or the
            database cannot be opened.
    """
    self.file_path = os.path.join(root, database)
    if not os.path.exists(self.file_path):
        # Report the full path: the bare database name can be ambiguous
        # when a root prefix is in play.
        raise weedb.OperationalError("Attempt to open a non-existent database %s"
                                     % self.file_path)
    timeout = to_int(argv.get('timeout', 5))
    isolation_level = argv.get('isolation_level')
    try:
        connection = sqlite3.connect(self.file_path, timeout=timeout,
                                     isolation_level=isolation_level)
    except sqlite3.OperationalError:
        # The Pysqlite driver does not include the database file path.
        # Include it in case it might be useful.
        raise weedb.OperationalError("Unable to open database '%s'" % (self.file_path,))
    weedb.Connection.__init__(self, connection, database, 'sqlite')
def run(self):
    """Run an rsync upload of the generated report files."""
    import weeutil.rsyncupload
    # Whether to log successes/failures, searched up the skin/config hierarchy:
    log_success = to_bool(
        weeutil.config.search_up(self.skin_dict, 'log_success', True))
    log_failure = to_bool(
        weeutil.config.search_up(self.skin_dict, 'log_failure', True))
    # We don't try to collect performance statistics about rsync, because
    # rsync will report them for us. Check the debug log messages.
    try:
        local_root = os.path.join(
            self.config_dict['WEEWX_ROOT'],
            self.skin_dict.get('HTML_ROOT',
                               self.config_dict['StdReport']['HTML_ROOT']))
        rsync_data = weeutil.rsyncupload.RsyncUpload(
            local_root=local_root,
            remote_root=self.skin_dict['path'],
            server=self.skin_dict['server'],
            user=self.skin_dict.get('user'),
            port=to_int(self.skin_dict.get('port')),
            ssh_options=self.skin_dict.get('ssh_options'),
            compress=to_bool(self.skin_dict.get('compress', False)),
            delete=to_bool(self.skin_dict.get('delete', False)),
            log_success=log_success,
            log_failure=log_failure)
    except KeyError:
        # A missing required option (e.g. 'path', 'server', or HTML_ROOT)
        # is taken to mean rsync was not configured. Not an error.
        log.debug("rsyncgenerator: Rsync upload not requested. Skipped.")
        return
    try:
        rsync_data.run()
    except IOError as e:
        log.error("rsyncgenerator: Caught exception '%s': %s", type(e), e)
def week(self, data_binding=None, weeks_ago=0):
    """Return a TimespanBinder for the week in effect weeks_ago weeks ago.

    The first day of the week comes from option 'week_start' (default 6).
    """
    start_of_week = to_int(self.option_dict.get('week_start', 6))
    week_span = weeutil.weeutil.archiveWeekSpan(self.report_time,
                                                start_of_week,
                                                weeks_ago=weeks_ago)
    return TimespanBinder(week_span,
                          self.db_lookup,
                          data_binding=data_binding,
                          context='week',
                          formatter=self.formatter,
                          converter=self.converter,
                          **self.option_dict)
def rainyear(self, data_binding=None):
    """Return a TimespanBinder for the current rain year.

    The starting month comes from option 'rain_year_start' (default 1).
    """
    start_month = to_int(self.option_dict.get('rain_year_start', 1))
    rainyear_span = weeutil.weeutil.archiveRainYearSpan(self.report_time,
                                                        start_month)
    return TimespanBinder(rainyear_span,
                          self.db_lookup,
                          data_binding=data_binding,
                          context='rainyear',
                          formatter=self.formatter,
                          converter=self.converter,
                          **self.option_dict)
def __init__(self, q, api_key, station=0, server_url=Windy.DEFAULT_URL,
             skip_upload=False, manager_dict=None,
             post_interval=None, max_backlog=sys.maxsize, stale=None,
             log_success=True, log_failure=True,
             timeout=60, max_tries=3, retry_wait=5):
    """Initialize an instance of WindyThread.

    Args:
        q: The queue from which records are drawn.
        api_key: The Windy API key.
        station: The station number (converted to an int).
        server_url: Where to post the data.
        skip_upload: If truthy, prepare but do not actually upload.
    """
    super(WindyThread, self).__init__(q,
                                      protocol_name='Windy',
                                      manager_dict=manager_dict,
                                      post_interval=post_interval,
                                      max_backlog=max_backlog,
                                      stale=stale,
                                      log_success=log_success,
                                      log_failure=log_failure,
                                      max_tries=max_tries,
                                      timeout=timeout,
                                      retry_wait=retry_wait)
    self.api_key = api_key
    self.station = to_int(station)
    self.server_url = server_url
    self.skip_upload = to_bool(skip_upload)
    loginf("Data will be uploaded to %s" % self.server_url)
def update_field(topic_dict, fieldinfo, field, value, unit_system): """ Update field. """ # pylint: disable=too-many-locals name = fieldinfo.get('name', field) append_unit_label = fieldinfo.get('append_unit_label', topic_dict.get('append_unit_label')) if append_unit_label: (unit_type, _) = weewx.units.getStandardUnitType(unit_system, name) unit_type = AbstractPublishThread.UNIT_REDUCTIONS.get( unit_type, unit_type) if unit_type is not None: name = "%s_%s" % (name, unit_type) unit = fieldinfo.get('unit', None) if unit is not None: (from_unit, from_group) = weewx.units.getStandardUnitType(unit_system, field) from_tuple = (value, from_unit, from_group) converted_value = weewx.units.convert(from_tuple, unit)[0] else: converted_value = value conversion_type = fieldinfo.get('conversion_type', topic_dict.get('conversion_type')) format_string = fieldinfo.get('format', topic_dict.get('format')) if conversion_type == 'integer': formatted_value = to_int(converted_value) else: formatted_value = format_string % converted_value if conversion_type == 'float': formatted_value = to_float(formatted_value) return name, formatted_value
def __init__(self, engine, config_dict):
    """Initialize the RainRater and register it with the XTypes system."""
    super(StdRainRater, self).__init__(engine, config_dict)
    # Options live in [StdWXCalculate]; absent section means all defaults.
    calc_dict = config_dict.get('StdWXCalculate', {})
    self.rain_rater = RainRater(to_int(calc_dict.get('rain_period', 900)),
                                to_int(calc_dict.get('retain_period', 930)))
    # Make rainRate available to the rest of the system:
    weewx.xtypes.xtypes.append(self.rain_rater)
    self.bind(weewx.NEW_LOOP_PACKET, self.new_loop_packet)
def __init__(self, svc_dict, altitude_vt, latitude, longitude):
    """Initialize an instance of WXXTypes.

    Args:
        svc_dict: ConfigDict structure with configuration info.
        altitude_vt: The altitude of the station as a ValueTuple.
        latitude: Station latitude.
        longitude: Station longitude.

    Raises:
        weewx.ViolatedPrecondition: If option 'atc' lies outside
            the range [0.7, 0.91].
    """
    self.svc_dict = svc_dict
    self.altitude_vt = altitude_vt
    self.latitude = latitude
    self.longitude = longitude
    # Window of time for the evapotranspiration calculation, in seconds:
    self.et_period = to_int(svc_dict.get('et_period', 3600))
    # Atmospheric transmission coefficient; fail hard if out of range:
    self.atc = to_float(svc_dict.get('atc', 0.8))
    if not 0.7 <= self.atc <= 0.91:
        raise weewx.ViolatedPrecondition("Atmospheric transmission "
                                         "coefficient (%f) out of "
                                         "range [.7-.91]" % self.atc)
    # Atmospheric turbidity (2=clear, 4-5=smoggy):
    self.nfac = to_float(svc_dict.get('nfac', 2))
    # Height above ground at which wind is measured, in meters:
    self.wind_height = to_float(svc_dict.get('wind_height', 2.0))
def create(database_name='', SQLITE_ROOT='', driver='', **argv):  # @UnusedVariable
    """Create the database specified by the db_dict. If it already exists,
    an exception of type DatabaseExistsError will be thrown."""
    file_path = _get_filepath(SQLITE_ROOT, database_name, **argv)
    # Refuse to clobber an existing database:
    if os.path.exists(file_path):
        raise weedb.DatabaseExistsError("Database %s already exists" % (file_path, ))
    # Create any missing parent directories:
    parent_dir = os.path.dirname(file_path)
    if not os.path.exists(parent_dir):
        try:
            os.makedirs(parent_dir)
        except OSError:
            raise weedb.PermissionError("No permission to create %s" % parent_dir)
    # Open, then immediately close the database; this creates the file.
    connection = sqlite3.connect(file_path,
                                 timeout=to_int(argv.get('timeout', 5)),
                                 isolation_level=argv.get('isolation_level'))
    connection.close()
def __init__(self, database_name='', SQLITE_ROOT='', pragmas=None, **argv):
    """Initialize an instance of Connection.

    Parameters:

        database_name: The name of the Sqlite database. This is generally the file name
        SQLITE_ROOT: The path to the directory holding the database. Joining
          "SQLITE_ROOT" with "database_name" results in the full path to the sqlite file.
        pragmas: Any pragma statements, in the form of a dictionary.
        timeout: The amount of time, in seconds, to wait for a lock to be released.
          Optional. Default is 5.
        isolation_level: The type of isolation level to use. One of None,
          DEFERRED, IMMEDIATE, or EXCLUSIVE. Default is None (autocommit mode).

    If the operation fails, an exception of type weedb.OperationalError will be raised.
    """
    self.file_path = get_filepath(SQLITE_ROOT, database_name, **argv)
    if not os.path.exists(self.file_path):
        raise weedb.OperationalError("Attempt to open a non-existent database %s"
                                     % self.file_path)
    timeout = to_int(argv.get('timeout', 5))
    isolation_level = argv.get('isolation_level')
    try:
        connection = sqlite3.connect(self.file_path, timeout=timeout,
                                     isolation_level=isolation_level)
    except sqlite3.OperationalError:
        # The Pysqlite driver does not include the database file path.
        # Include it in case it might be useful.
        raise weedb.OperationalError("Unable to open database '%s'" % (self.file_path,))
    # Apply any caller-supplied pragmas before handing the connection over:
    if pragmas is not None:
        for pragma in pragmas:
            connection.execute("PRAGMA %s=%s;" % (pragma, pragmas[pragma]))
    weedb.Connection.__init__(self, connection, database_name, 'sqlite')
def get_concentrations(cfg: Configuration):
    """Return a Concentrations record from the first enabled source that
    yields a sufficiently fresh reading, or None if no source does.

    A reading older than cfg.archive_interval seconds is treated as stale
    and the next source is tried.
    """
    for source in cfg.sources:
        if source.enable:
            record = collect_data(source.hostname,
                                  source.port,
                                  source.timeout,
                                  cfg.archive_interval)
            if record is not None:
                log.debug('get_concentrations: source: %s' % record)
                reading_ts = to_int(record['dateTime'])
                age_of_reading = time.time() - reading_ts
                if age_of_reading > cfg.archive_interval:
                    # Stale; try the next source.
                    log.info('Reading from %s:%d is old: %d seconds.' % (
                        source.hostname, source.port, age_of_reading))
                    continue
                log.debug('get_concentrations: record: %s' % record)
                concentrations = Concentrations(
                    timestamp = reading_ts,
                    pm_1_last = record['pm_1_last'],
                    pm_2p5_last = record['pm_2p5_last'],
                    pm_10_last = record['pm_10_last'],
                    pm_1 = record['pm_1'],
                    pm_2p5 = record['pm_2p5'],
                    pm_10 = record['pm_10'],
                    pm_2p5_nowcast = record['pm_2p5_nowcast'],
                    pm_10_nowcast = record['pm_10_nowcast'],
                    hum = record['hum'],
                    temp = record['temp'],
                )
                log.debug('get_concentrations: concentrations: %s' % concentrations)
                return concentrations
    log.error('Could not get concentrations from any source.')
    return None
def __init__(self, database_name='', SQLITE_ROOT='', pragmas=None, **argv):
    """Initialize an instance of Connection.

    Parameters:

        database_name: The name of the Sqlite database. This is generally the file name
        SQLITE_ROOT: The path to the directory holding the database. Joining
          "SQLITE_ROOT" with "database_name" results in the full path to the sqlite file.
        pragmas: Any pragma statements, in the form of a dictionary.
        timeout: The amount of time, in seconds, to wait for a lock to be released.
          Optional. Default is 5.
        isolation_level: The type of isolation level to use. One of None,
          DEFERRED, IMMEDIATE, or EXCLUSIVE. Default is None (autocommit mode).

    Raises weedb.NoDatabaseError if the file does not exist.
    NOTE(review): unlike sibling implementations, sqlite3.connect is not
    wrapped here, so a raw sqlite3.OperationalError can propagate --- confirm
    callers expect that.
    """
    self.file_path = _get_filepath(SQLITE_ROOT, database_name, **argv)
    if not os.path.exists(self.file_path):
        raise weedb.NoDatabaseError(
            "Attempt to open a non-existent database %s" % self.file_path)
    timeout = to_int(argv.get('timeout', 5))
    isolation_level = argv.get('isolation_level')
    connection = sqlite3.connect(self.file_path, timeout=timeout,
                                 isolation_level=isolation_level)
    # Apply any caller-supplied pragmas before handing the connection over:
    if pragmas is not None:
        for pragma in pragmas:
            connection.execute("PRAGMA %s=%s;" % (pragma, pragmas[pragma]))
    weedb.Connection.__init__(self, connection, database_name, 'sqlite')
def __init__(self, engine, config_dict):
    """Initialize an instance of StdWXXTypes.

    Builds a WXXTypes calculator from station info and the
    [StdWXCalculate] options, then registers it with the xtypes system.
    """
    super(StdWXXTypes, self).__init__(engine, config_dict)
    # Absent [StdWXCalculate] section means all defaults:
    options = config_dict.get('StdWXCalculate', {})
    # Window of time for evapotranspiration calculation, in seconds:
    et_period = to_int(options.get('et_period', 3600))
    # Atmospheric transmission coefficient [0.7-0.91]:
    atc = to_float(options.get('atc', 0.8))
    # Atmospheric turbidity (2=clear, 4-5=smoggy):
    nfac = to_float(options.get('nfac', 2))
    # Height above ground at which wind is measured, in meters:
    wind_height = to_float(options.get('wind_height', 2.0))
    # Adjust wind direction to null, if the wind speed is zero:
    ignore_zero_wind = to_bool(options.get('ignore_zero_wind', False))
    maxSolarRad_algo = options.get('Algorithms',
                                   {'maxSolarRad': 'rs'}).get('maxSolarRad',
                                                              'rs').lower()
    self.wxxtypes = WXXTypes(engine.stn_info.altitude_vt,
                             engine.stn_info.latitude_f,
                             engine.stn_info.longitude_f,
                             et_period,
                             atc,
                             nfac,
                             wind_height,
                             ignore_zero_wind,
                             maxSolarRad_algo)
    # Add to the xtypes system:
    weewx.xtypes.xtypes.append(self.wxxtypes)
def LMTweek(self, data_binding=None, weeks_ago=0, dayboundary=None):
    """Return a DayboundaryTimespanBinder for the week, in local mean time.

    The first day of the week comes from option 'week_start' (default 6).
    The 'dayboundary' argument is accepted but not used here.
    """
    start_of_week = to_int(self.option_dict.get('week_start', 6))
    week_span = weekSpanTZ(self.lmt_tz,
                           self.report_time,
                           start_of_week,
                           weeks_ago=weeks_ago)
    return DayboundaryTimespanBinder(week_span,
                                     self.lmt,
                                     self.db_lookup,
                                     data_binding=data_binding,
                                     context='week',
                                     formatter=self.formatter,
                                     converter=self.converter,
                                     LMT=self.lmt,
                                     **self.option_dict)
def __init__(self, queue, host, port, user, password, measurement, platform, stream,
             loop_filters, protocol_name="WeeRT",
             post_interval=None, max_backlog=sys.maxsize, stale=None,
             log_success=True, log_failure=True,
             timeout=10, max_tries=3, retry_wait=5, retry_login=3600,
             softwaretype="weewx-%s" % weewx.__version__,
             skip_upload=False):
    """Initialize an instance of the WeeRTThread class.

    Args:
        queue: The queue from which new packets are drawn.
        host: The host of the WeeRT server.
        port: Its port (converted to an int).
        user: The username to be sent.
        password: The password to be sent.
        measurement: The InfluxDB measurement name to use.
        platform: The platform name.
        stream: The stream name.
        loop_filters: A data structure holding what values are to be emitted.

    The remaining parameters are passed through to the superclass.
    """
    super(WeeRTThread, self).__init__(queue,
                                      protocol_name=protocol_name,
                                      post_interval=post_interval,
                                      max_backlog=max_backlog,
                                      stale=stale,
                                      log_success=log_success,
                                      log_failure=log_failure,
                                      timeout=timeout,
                                      max_tries=max_tries,
                                      retry_wait=retry_wait,
                                      retry_login=retry_login,
                                      softwaretype=softwaretype,
                                      skip_upload=skip_upload)
    self.host = host
    self.port = to_int(port)
    self.user = user
    self.password = password
    self.measurement = measurement
    self.platform = platform
    self.stream = stream
    # Pre-compile the filter functions applied to each loop packet:
    self.filter_funcs = _compile_filters(loop_filters)
def __init__(self, config_dict, skin_dict, *args, **kwargs):
    """Initialize an instance of HealthChecksGenerator.

    Raises:
        ValueError: If the required 'uuid' option is missing or empty.
    """
    weewx.reportengine.ReportGenerator.__init__(self, config_dict, skin_dict,
                                                *args, **kwargs)
    # Optional settings, with defaults:
    self.host = skin_dict.get('host', 'hc-ping.com')
    self.timeout = to_int(skin_dict.get('timeout', 10))
    # 'uuid' is mandatory --- fail loudly if absent:
    self.uuid = skin_dict.get('uuid')
    if not self.uuid:
        raise ValueError("uuid option is required.")
def week(self, data_binding=None):
    """Return a TimespanBinder for the current week.

    The first day of the week comes from option 'week_start' (default 6).
    """
    start_of_week = to_int(self.option_dict.get("week_start", 6))
    week_span = weeutil.weeutil.archiveWeekSpan(self.report_time, start_of_week)
    return TimespanBinder(
        week_span,
        self.db_lookup,
        data_binding=data_binding,
        context="week",
        formatter=self.formatter,
        converter=self.converter,
        **self.option_dict
    )
def rainyear(self, data_binding=None):
    """Return a TimespanBinder for the current rain year.

    The starting month comes from option 'rain_year_start' (default 1).
    """
    start_month = to_int(self.option_dict.get("rain_year_start", 1))
    rainyear_span = weeutil.weeutil.archiveRainYearSpan(self.report_time,
                                                        start_month)
    return TimespanBinder(
        rainyear_span,
        self.db_lookup,
        data_binding=data_binding,
        context="rainyear",
        formatter=self.formatter,
        converter=self.converter,
        **self.option_dict
    )
def filter_data(upload_all, templates, inputs, append_units_label,
                conversion_type, record):
    """Filter and format data from a record for publishing.

    Builds/extends the templates cache as needed, then formats every
    templated observation found in the record. Values that cannot be
    coerced to float are silently skipped. If both latitude and longitude
    appear in the output, a combined 'position' value is added.
    """
    # pylint: disable=invalid-name
    # if uploading everything, we must check the upload variables list
    # every time since variables may come and go in a record. use the
    # inputs to override any generic template generation.
    if upload_all:
        for obs in record:
            if obs not in templates:
                templates[obs] = _get_template(obs, inputs.get(obs, {}),
                                               append_units_label,
                                               record['usUnits'])
    # otherwise, create the list of upload variables once, based on the
    # user-specified list of inputs.
    elif not templates:
        for obs in inputs:
            templates[obs] = _get_template(obs, inputs[obs],
                                           append_units_label,
                                           record['usUnits'])

    # Populate each template with data from the record:
    data = dict()
    for obs, template in templates.items():
        try:
            value = float(record.get(obs))
            name = template.get('name', obs)
            fmt = template.get('format', '%s')
            to_units = template.get('units')
            if to_units is not None:
                (from_unit, from_group) = weewx.units.getStandardUnitType(
                    record['usUnits'], obs)
                value = weewx.units.convert((value, from_unit, from_group),
                                            to_units)[0]
            if conversion_type == 'integer':
                result = to_int(value)
            else:
                result = fmt % value
                if conversion_type == 'float':
                    result = to_float(result)
            data[name] = result
        except (TypeError, ValueError):
            # Missing or non-numeric observation --- skip it.
            pass
    # FIXME: generalize this
    if 'latitude' in data and 'longitude' in data:
        parts = [str(data['latitude']), str(data['longitude'])]
        if 'altitude_meter' in data:
            parts.append(str(data['altitude_meter']))
        elif 'altitude_foot' in data:
            parts.append(str(data['altitude_foot']))
        data['position'] = ','.join(parts)
    return data
def __init__(self, engine, config_dict):
    """Initialize the RainRater, merging user overrides into the defaults."""
    super(StdRainRater, self).__init__(engine, config_dict)
    # User-defined overrides, if any:
    try:
        override_dict = config_dict['StdWXCalculate']['RainRater']
    except KeyError:
        override_dict = {}
    # Defaults first, then lay the user overrides on top:
    option_dict = weeutil.config.deep_copy(
        defaults_dict['StdWXCalculate']['RainRater'])
    option_dict.merge(override_dict)
    self.rain_rater = RainRater(to_int(option_dict.get('rain_period', 900)),
                                to_int(option_dict.get('retain_period', 930)))
    # Register with the XTypes system:
    weewx.xtypes.xtypes.append(self.rain_rater)
    self.bind(weewx.NEW_LOOP_PACKET, self.new_loop_packet)
def get_concentrations(cfg: Configuration):
    """Return a Concentrations record from the first enabled source that
    yields a sufficiently fresh reading, or None if no source does.

    A reading older than cfg.archive_interval seconds is treated as stale
    and the next source is tried. If the sensor reports a 'b' channel,
    the particulate readings are averaged over both channels.
    """
    for source in cfg.sources:
        if not source.enable:
            continue
        record = collect_data(source.hostname,
                              source.port,
                              source.timeout,
                              cfg.archive_interval,
                              source.is_proxy)
        if record is None:
            continue
        log.debug('get_concentrations: source: %s' % record)
        reading_ts = to_int(record['dateTime'])
        age_of_reading = time.time() - reading_ts
        if age_of_reading > cfg.archive_interval:
            # Stale; try the next source.
            log.info('Reading from %s:%d is old: %d seconds.' % (
                source.hostname, source.port, age_of_reading))
            continue
        concentrations = Concentrations(
            timestamp=reading_ts,
            pm1_0=to_float(record['pm1_0_atm']),
            pm10_0=to_float(record['pm10_0_atm']),
            pm2_5_cf_1=to_float(record['pm2_5_cf_1']),
            pm2_5_cf_1_b=None,  # Updated below if a second sensor is present.
            current_temp_f=to_int(record['current_temp_f']),
            current_humidity=to_int(record['current_humidity']),
        )
        # If there is a 'b' sensor, add it in and average the readings
        # (typo "averaing" in the original log message fixed):
        log.debug(
            'get_concentrations: concentrations BEFORE averaging in b reading: %s'
            % concentrations)
        if 'pm1_0_atm_b' in record:
            concentrations.pm1_0 = (concentrations.pm1_0
                                    + to_float(record['pm1_0_atm_b'])) / 2.0
            concentrations.pm2_5_cf_1_b = to_float(record['pm2_5_cf_1_b'])
            concentrations.pm10_0 = (concentrations.pm10_0
                                     + to_float(record['pm10_0_atm_b'])) / 2.0
        log.debug('get_concentrations: concentrations: %s' % concentrations)
        return concentrations
    log.error('Could not get concentrations from any source.')
    return None
def __init__(self, engine, config_dict):
    """Initialize the XDR service: read the [XDR] stanza, bind events,
    and create (but not start) the serial reader thread."""
    # Initialize my superclass:
    super(XDR, self).__init__(engine, config_dict)
    # Extract our stanza from the configuration dictionary
    xdr_dict = config_dict.get('XDR', {})
    # Extract stuff out of the resultant dictionary
    port = xdr_dict.get('port', '/dev/ttyACM0')
    baudrate = to_int(xdr_dict.get('baudrate', 9600))
    timeout = to_int(xdr_dict.get('timeout', 5))
    self.max_packets = to_int(xdr_dict.get('max_packets', 5))
    self.sensor_map = xdr_dict.get('sensor_map', {})
    loginf("Sensor map is %s" % self.sensor_map)
    self.queue = queue.Queue()
    self.bind(weewx.STARTUP, self.startup)
    self.bind(weewx.NEW_LOOP_PACKET, self.new_loop_packet)
    self.thread = XDRThread(self.queue, port, baudrate, timeout)
    # Thread.setDaemon() is deprecated since Python 3.10; assign the
    # attribute directly instead (same effect).
    self.thread.daemon = True
def __init__(self, config_dict):
    """Initialize the queue-publishing thread from the
    [MQTTPublish][[PublishQueue]] section of config_dict."""
    super(PublishQueueThread, self).__init__('Queue')
    self.config_dict = config_dict
    self.service_dict = config_dict.get('MQTTPublish',
                                        {}).get('PublishQueue', {})
    # Keep secrets out of the debug log:
    exclude_keys = ['password']
    sanitized_service_dict = {
        k: self.service_dict[k]
        for k in set(list(self.service_dict.keys())) - set(exclude_keys)
    }
    logdbg(self.publish_type,
           "sanitized configuration removed %s" % exclude_keys)
    logdbg(self.publish_type,
           "sanitized_service_dict is %s" % sanitized_service_dict)
    self.binding = self.service_dict.get('data_binding', 'ext_queue_binding')
    self.mqtt_binding = self.service_dict.get('mqtt_data_binding',
                                              'mqtt_queue_binding')
    self.catchup_count = int(self.service_dict.get('catchup_count', 10))
    self.keepalive = to_int(self.service_dict.get('keepalive', 60))
    # Seconds to wait before retrying a failed publish (may be fractional):
    self.wait_before_retry = float(
        self.service_dict.get('wait_before_retry', 2))
    self.publish_interval = int(
        self.service_dict.get('publish_interval', 0))
    self.publish_delay = int(self.service_dict.get('publish_delay', 0))
    loginf(self.publish_type,
           "External queue data binding is %s" % self.binding)
    loginf(self.publish_type,
           "MQTT queue data binding is %s" % self.mqtt_binding)
    loginf(self.publish_type,
           "Wait before retry is %i" % self.wait_before_retry)
    loginf(self.publish_type,
           "Publish interval is %i" % self.publish_interval)
    loginf(self.publish_type,
           "Publish delay is %i" % self.publish_delay)
    self.topics_loop, self.topics_archive = self.configure_topics(
        self.service_dict)
    # Message ids of in-flight MQTT publishes:
    self.mids = {}
    self.threading_event = threading.Event()
    self.db_binder = weewx.manager.DBBinder(config_dict)
    # The database manager is opened lazily, not here:
    self.dbm = None
def __init__(self, engine, config_dict):
    """Initialize the UradMon service from the [UradMon] section of
    config_dict, optionally verify the database schema, and bind to
    archive or loop events depending on the 'binding' option."""
    super(UradMon, self).__init__(engine, config_dict)
    loginf('service version is %s' % urad_version)
    udict = config_dict.get('UradMon', {})
    self.udebug = to_bool(udict.get('urad_debug', False))
    self.dbm_check = to_bool(udict.get('dbm_check', True))
    self.rad_addr = udict.get('uradmon_address', '')
    self.binding = udict.get('binding', 'archive')
    self.data_binding = udict.get('data_binding', 'uradmon_binding')
    self.dbm = self.engine.db_binder.get_manager(
        data_binding=self.data_binding, initialize=True)
    sf_int = to_int(config_dict['StdArchive'].get('archive_interval', 300))
    # NOTE(review): '/' is true division in Python 3, so rec_interval may
    # be a float --- confirm the database entry expects that.
    self.rec_interval = sf_int / 60  # convert to minute for database entry
    loginf("archive_interval in seconds is %s, minutes %s" %
           (sf_int, self.rec_interval))
    # It is possible to use the old database with v0.2.x uradmon.py however
    # the dbm schema check will fail, so a workaround is to disable it for
    # that instance. You'll also need to point the weewx.conf database
    # stanza to the old database, probably uradmon.sdb (sqlite) or
    # uradmon (mysql)
    # To disable this check add
    # dbm_check = False
    # to the [UradMon] section in weewx.conf
    if self.dbm_check:
        # ensure schema on disk matches schema in memory
        dbcol = self.dbm.connection.columnsOf(self.dbm.table_name)
        dbm_dict = weewx.manager.get_manager_dict(
            config_dict['DataBindings'], config_dict['Databases'],
            self.data_binding)
        memcol = [x[0] for x in dbm_dict['schema']]
        if dbcol != memcol:
            # On mismatch, log and disable the service (no event bindings
            # are made) rather than crashing the engine:
            logerr('schema mismatch: %s != %s' % (dbcol, memcol))
            logerr('Skipping further uradmon queries until the database' +
                   ' schema mismatch is fixed')
            return
            #raise Exception('schema mismatch: %s != %s' %
            #                (dbcol, memcol))
    loginf("uRADMonitor address is %s" % self.rad_addr)
    if self.rad_addr != '':
        if self.binding == 'archive':
            self.bind(weewx.NEW_ARCHIVE_RECORD, self.handle_new_archive)
        else:
            self.bind(weewx.NEW_LOOP_PACKET, self.handle_new_loop)
    else:
        loginf("No uRADMonitor address specified, skipping service")
def create(database_name='', SQLITE_ROOT='', driver='', **argv):
    """Create the database specified by the db_dict. If it already exists,
    an exception of type DatabaseExists will be thrown."""
    file_path = get_filepath(SQLITE_ROOT, database_name, **argv)
    # Refuse to clobber an existing database:
    if os.path.exists(file_path):
        raise weedb.DatabaseExists("Database %s already exists" % (file_path,))
    # Create any missing parent directories:
    parent_dir = os.path.dirname(file_path)
    if not os.path.exists(parent_dir):
        os.makedirs(parent_dir)
    # Open, then immediately close the database; this creates the file.
    connection = sqlite3.connect(file_path,
                                 timeout=to_int(argv.get('timeout', 5)),
                                 isolation_level=argv.get('isolation_level'))
    connection.close()
def _init_topic_dict(topic, site_dict, topic_dict, payload_type=None):
    """Populate topic_dict with the settings for one topic.

    For each option, a per-topic value under site_dict['topics'][topic]
    wins; otherwise the site-wide value in site_dict is used; otherwise a
    hard-coded default. If payload_type is given, it overrides the
    configured 'type'. Mutates topic_dict in place.
    """
    topic_dict['skip_upload'] = site_dict['topics'][topic] \
        .get('skip_upload', site_dict.get('skip_upload', False))
    topic_dict['binding'] = site_dict['topics'][topic].get(
        'binding', site_dict.get('binding', 'archive'))
    if payload_type is None:
        topic_dict['type'] = site_dict['topics'][topic] \
            .get('type', site_dict.get('type', 'json'))
    else:
        topic_dict['type'] = payload_type
    topic_dict['append_units_label'] = to_bool(site_dict['topics'][topic] \
        .get('append_units_label', site_dict.get('append_units_label', True)))
    topic_dict['conversion_type'] = site_dict['topics'][topic] \
        .get('conversion_type', site_dict.get('conversion_type', 'string'))
    topic_dict['augment_record'] = to_bool(site_dict['topics'][topic] \
        .get('augment_record', site_dict.get('augment_record', True)))
    usn = site_dict['topics'][topic].get(
        'unit_system', site_dict.get('unit_system', None))
    if usn is not None:
        # Translate the unit-system name (e.g. 'METRIC') to its constant:
        topic_dict['unit_system'] = weewx.units.unit_constants[usn]
        loginf("for %s: desired unit system is %s" % (topic, usn))
    # True when obs_to_upload is (case-insensitively) 'all':
    topic_dict['upload_all'] = bool(site_dict['topics'][topic] \
        .get('obs_to_upload', site_dict.get('obs_to_upload', 'all')).lower() == 'all')
    topic_dict['retain'] = to_bool(site_dict['topics'][topic].get(
        'retain', site_dict.get('retain', False)))
    topic_dict['qos'] = to_int(site_dict['topics'][topic].get(
        'qos', site_dict.get('qos', 0)))
    topic_dict['inputs'] = dict(site_dict['topics'][topic].get(
        'inputs', site_dict.get('inputs', {})))
    # Template cache, filled in later as records arrive:
    topic_dict['templates'] = dict()
    loginf("for %s binding to %s" % (topic, topic_dict['binding']))
def __init__(self, engine, config_dict):
    """Initialize the Purple service.

    Reads the [Purple] section of the configuration, takes an initial
    concentrations reading, registers the AQI xtype and starts the
    background polling thread.

    engine: the weewx engine
    config_dict: the full weewx configuration dictionary
    """
    super(Purple, self).__init__(engine, config_dict)
    log.info("Service version is %s." % WEEWX_PURPLE_VERSION)

    self.engine = engine
    self.config_dict = config_dict.get('Purple', {})

    self.cfg = Configuration(
        lock=threading.Lock(),
        concentrations=None,
        archive_interval=int(
            config_dict['StdArchive']['archive_interval']),
        archive_delay=to_int(config_dict['StdArchive'].get(
            'archive_delay', 15)),
        poll_interval=5,
        sources=Purple.configure_sources(self.config_dict))

    # Prime the cache with a first reading so loop packets can be
    # augmented immediately.
    with self.cfg.lock:
        self.cfg.concentrations = get_concentrations(self.cfg)

    source_count = 0
    for source in self.cfg.sources:
        if source.enable:
            source_count += 1
            log.info(
                'Source %d for PurpleAir readings: %s %s:%s, proxy: %s, timeout: %d'
                % (source_count,
                   'purple-proxy' if source.is_proxy else 'sensor',
                   source.hostname, source.port, source.is_proxy,
                   source.timeout))

    if source_count == 0:
        log.error(
            'No sources configured for purple extension. Purple extension is inoperable.'
        )
    else:
        weewx.xtypes.xtypes.append(AQI())

        # Start a thread to query proxies and make aqi available to loopdata.
        # Use the Thread constructor's name/daemon arguments rather than the
        # deprecated setName()/setDaemon() methods.
        dp: DevicePoller = DevicePoller(self.cfg)
        t: threading.Thread = threading.Thread(
            target=dp.poll_device, name='Purple', daemon=True)
        t.start()

        self.bind(weewx.NEW_LOOP_PACKET, self.new_loop_packet)
def __init__(self, q, api_key, station=0, skip_upload=False,
             manager_dict=None, post_interval=None, max_backlog=maxsize,
             stale=None, log_success=True, log_failure=True,
             timeout=60, max_tries=3, retry_wait=5):
    """Uploader thread for the Scenic service.

    q: the queue of records to post
    api_key: API key used to authenticate with the service
    station: numeric station identifier (inserted into the upload URL)
    skip_upload: if True, go through the motions but do not post
    Remaining arguments are passed through to the RESTful base thread.
    """
    super(ScenicThread, self).__init__(q,
                                       protocol_name='Scenic',
                                       manager_dict=manager_dict,
                                       post_interval=post_interval,
                                       max_backlog=max_backlog,
                                       stale=stale,
                                       log_success=log_success,
                                       log_failure=log_failure,
                                       max_tries=max_tries,
                                       timeout=timeout,
                                       retry_wait=retry_wait)
    self.api_key = api_key
    self.station = to_int(station)  # TODO: convert to alphanumeric
    # Bug fix: the original referenced an undefined name 'api', which would
    # raise NameError at construction time. Use the api_key parameter.
    self.server_url = DEFAULT_URL % (station, api_key)
    info("Data will be uploaded to %s" % self.server_url)
    self.skip_upload = to_bool(skip_upload)
def __init__(self, engine, config_dict):
    """Set up the HealthChecks service and send the initial 'start' ping.

    Options are read from the [StdReport][[HealthChecks]] section. The
    base class stores config_dict on self, which is used here.
    """
    super(HealthChecksService, self).__init__(engine, config_dict)

    options = self.config_dict.get('StdReport', {}).get('HealthChecks', {})

    # Bail out early when the service is disabled.
    self.enable = to_bool(options.get('enable', True))
    if not self.enable:
        loginf("Not enabled, exiting.")
        return

    self.host = options.get('host', 'hc-ping.com')
    self.timeout = to_int(options.get('timeout', 10))

    # A check UUID is mandatory — there is nothing to ping without it.
    self.uuid = options.get('uuid')
    if not self.uuid:
        raise ValueError("uuid option is required.")

    # Tell the healthchecks server that the service is starting.
    send_ping(self.host, self.uuid, self.timeout, "start")
def create(database_name='', SQLITE_ROOT='', driver='', **argv):  # @UnusedVariable
    """Create the database specified by the db_dict. If it already exists,
    an exception of type DatabaseExistsError will be thrown."""
    file_path = _get_filepath(SQLITE_ROOT, database_name, **argv)
    # Refuse to clobber an existing database file.
    if os.path.exists(file_path):
        raise weedb.DatabaseExistsError("Database %s already exists" % (file_path,))
    # Create any missing parent directories, mapping a filesystem error
    # onto the weedb exception hierarchy.
    parent = os.path.dirname(file_path)
    if not os.path.exists(parent):
        try:
            os.makedirs(parent)
        except OSError:
            raise weedb.PermissionError("No permission to create %s" % parent)
    # Open, then immediately close the database. This creates the file.
    connection = sqlite3.connect(file_path,
                                 timeout=to_int(argv.get('timeout', 5)),
                                 isolation_level=argv.get('isolation_level'))
    connection.close()
def __init__(self, engine, config_dict):
    """Initialize the AirLink service.

    Reads the [AirLink] section of the configuration, takes an initial
    concentrations reading, registers the AQI xtype and starts the
    background polling thread.

    engine: the weewx engine
    config_dict: the full weewx configuration dictionary
    """
    super(AirLink, self).__init__(engine, config_dict)
    log.info("Service version is %s." % WEEWX_AIRLINK_VERSION)

    self.engine = engine
    self.config_dict = config_dict.get('AirLink', {})

    self.cfg = Configuration(
        lock=threading.Lock(),
        concentrations=None,
        archive_interval=int(
            config_dict['StdArchive']['archive_interval']),
        archive_delay=to_int(config_dict['StdArchive'].get(
            'archive_delay', 15)),
        poll_interval=5,
        sources=AirLink.configure_sources(self.config_dict))

    # Prime the cache with a first reading so loop packets can be
    # augmented immediately.
    with self.cfg.lock:
        self.cfg.concentrations = get_concentrations(self.cfg)

    source_count = 0
    for source in self.cfg.sources:
        if source.enable:
            source_count += 1
            log.info('Source %d for AirLink readings: %s:%s, timeout: %d' % (
                source_count, source.hostname, source.port, source.timeout))

    if source_count == 0:
        log.error(
            'No sources configured for airlink extension. AirLink extension is inoperable.'
        )
    else:
        weewx.xtypes.xtypes.append(AQI())

        # Start a thread to query devices. Use the Thread constructor's
        # name/daemon arguments rather than the deprecated
        # setName()/setDaemon() methods.
        dp: DevicePoller = DevicePoller(self.cfg)
        t: threading.Thread = threading.Thread(
            target=dp.poll_device, name='AirLink', daemon=True)
        t.start()

        self.bind(weewx.NEW_LOOP_PACKET, self.new_loop_packet)
def generate(self, section, gen_ts):
    """Generate one or more reports for the indicated section.

    Each section in a period is a report. A report has one or more templates.

    section: A ConfigObj dictionary, holding the templates to be generated.
    Any subsections in the dictionary will be recursively processed as well.

    gen_ts: The report will be current to this time.

    Returns: the number of files generated, including those produced by
    recursive calls on subsections.

    NOTE: Python 2 code (has_key(), print >>, 'except Exception, e').
    """
    ngen = 0
    # Go through each subsection (if any) of this section,
    # generating from any templates they may contain
    for subsection in section.sections:
        # Sections 'SummaryByDay', 'SummaryByMonth' and 'SummaryByYear'
        # imply summarize_by certain time spans
        if not section[subsection].has_key('summarize_by'):
            if subsection == 'SummaryByDay':
                section[subsection]['summarize_by'] = 'SummaryByDay'
            elif subsection == 'SummaryByMonth':
                section[subsection]['summarize_by'] = 'SummaryByMonth'
            elif subsection == 'SummaryByYear':
                section[subsection]['summarize_by'] = 'SummaryByYear'
        # Call recursively, to generate any templates in this subsection
        ngen += self.generate(section[subsection], gen_ts)

    # We have finished recursively processing any subsections in this
    # section. Time to do the section itself. If there is no option
    # 'template', then there isn't anything to do. Return.
    if not section.has_key('template'):
        return ngen

    # Change directory to the skin subdirectory. We use absolute paths
    # for cheetah, so the directory change is not necessary for generating
    # files. However, changing to the skin directory provides a known
    # location so that calls to os.getcwd() in any templates will return
    # a predictable result.
    os.chdir(os.path.join(self.config_dict['WEEWX_ROOT'],
                          self.skin_dict['SKIN_ROOT'],
                          self.skin_dict['skin']))

    # Collapse the section hierarchy into a flat dictionary of options.
    report_dict = weeutil.weeutil.accumulateLeaves(section)

    (template, dest_dir, encoding, default_binding) = self._prepGen(report_dict)

    # Get start and stop times
    default_archive = self.db_binder.get_manager(default_binding)
    start_ts = default_archive.firstGoodStamp()
    if not start_ts:
        loginf('Skipping template %s: cannot find start time' % section['template'])
        return ngen

    if gen_ts:
        # Find the archive record nearest the requested generation time;
        # max_delta bounds how far away it may be (None = unbounded).
        record = default_archive.getRecord(gen_ts,
                                           max_delta=to_int(report_dict.get('max_delta')))
        if record:
            stop_ts = record['dateTime']
        else:
            loginf('Skipping template %s: generate time %s not in database' %
                   (section['template'], timestamp_to_string(gen_ts)))
            return ngen
    else:
        stop_ts = default_archive.lastGoodStamp()

    # Get an appropriate generator function
    summarize_by = report_dict['summarize_by']
    if summarize_by in CheetahGenerator.generator_dict:
        _spangen = CheetahGenerator.generator_dict[summarize_by]
    else:
        # Just a single timespan to generate. Use a lambda expression.
        _spangen = lambda start_ts, stop_ts : [weeutil.weeutil.TimeSpan(start_ts, stop_ts)]

    # Use the generator function
    for timespan in _spangen(start_ts, stop_ts):
        start_tt = time.localtime(timespan.start)
        stop_tt = time.localtime(timespan.stop)

        if summarize_by in CheetahGenerator.format_dict:
            # This is a "SummaryBy" type generation. If it hasn't been done
            # already, save the date as a string, to be used inside the
            # document
            date_str = time.strftime(CheetahGenerator.format_dict[summarize_by],
                                     start_tt)
            if date_str not in self.outputted_dict[summarize_by]:
                self.outputted_dict[summarize_by].append(date_str)
            # For these "SummaryBy" generations, the file name comes from
            # the start of the timespan:
            _filename = self._getFileName(template, start_tt)
        else:
            # This is a "ToDate" generation. File name comes
            # from the stop (i.e., present) time:
            _filename = self._getFileName(template, stop_tt)

        # Get the absolute path for the target of this template
        _fullname = os.path.join(dest_dir, _filename)

        # Skip summary files outside the timespan
        if report_dict['summarize_by'] in CheetahGenerator.generator_dict \
                and os.path.exists(_fullname) \
                and not timespan.includesArchiveTime(stop_ts):
            continue

        # skip files that are fresh, but only if staleness is defined
        stale = to_int(report_dict.get('stale_age'))
        if stale is not None:
            t_now = time.time()
            try:
                last_mod = os.path.getmtime(_fullname)
                if t_now - last_mod < stale:
                    logdbg("Skip '%s': last_mod=%s age=%s stale=%s" %
                           (_filename, last_mod, t_now - last_mod, stale))
                    continue
            except os.error:
                # Target does not exist yet; generate it.
                pass

        searchList = self._getSearchList(encoding, timespan, default_binding)
        # Render into a temporary file, then rename into place so readers
        # never see a partially-written report.
        tmpname = _fullname + '.tmp'

        try:
            compiled_template = Cheetah.Template.Template(
                file=template,
                searchList=searchList,
                filter=encoding,
                filtersLib=weewx.cheetahgenerator)
            with open(tmpname, mode='w') as _file:
                print >> _file, compiled_template
            os.rename(tmpname, _fullname)
        except Exception, e:
            # We would like to get better feedback when there are cheetah
            # compiler failures, but there seem to be no hooks for this.
            # For example, if we could get make cheetah emit the source
            # on which the compiler is working, one could compare that with
            # the template to figure out exactly where the problem is.
            # In Cheetah.Compile.ModuleCompiler the source is manipulated
            # a bit then handed off to parserClass. Unfortunately there
            # are no hooks to intercept the source and spit it out. So
            # the best we can do is indicate the template that was being
            # processed when the failure occurred.
            logerr("Generate failed with exception '%s'" % type(e))
            logerr("**** Ignoring template %s" % template)
            logerr("**** Reason: %s" % e)
            weeutil.weeutil.log_traceback("**** ")
        else:
            ngen += 1
        finally:
            # NOTE(review): the body of this 'finally' clause is truncated
            # in this excerpt — presumably it removes tmpname; confirm
            # against the full source.
def generate(self, section, gen_ts):
    """Generate one or more reports for the indicated section.

    Each section in a period is a report. A report has one or more templates.

    section: A ConfigObj dictionary, holding the templates to be generated.
    Any subsections in the dictionary will be recursively processed as well.

    gen_ts: The report will be current to this time.

    Returns: the number of files generated, including those produced by
    recursive calls on subsections.

    NOTE: Python 2 code (has_key(), print >>, 'except Exception, e').
    """
    ngen = 0
    # Go through each subsection (if any) of this section,
    # generating from any templates they may contain
    for subsection in section.sections:
        # Sections 'SummaryByMonth' and 'SummaryByYear' imply summarize_by
        # certain time spans
        if not section[subsection].has_key('summarize_by'):
            if subsection == 'SummaryByMonth':
                section[subsection]['summarize_by'] = 'SummaryByMonth'
            elif subsection == 'SummaryByYear':
                section[subsection]['summarize_by'] = 'SummaryByYear'
        # Call myself recursively, to generate any templates in this subsection
        ngen += self.generate(section[subsection], gen_ts)

    # We have finished recursively processing any subsections in this
    # section. Time to do the section itself. If there is no option
    # 'template', then there isn't anything to do. Return.
    if not section.has_key('template'):
        return ngen

    # Change directory to the skin subdirectory. We use absolute paths
    # for cheetah, so the directory change is not necessary for generating
    # files. However, changing to the skin directory provides a known
    # location so that calls to os.getcwd() in any templates will return
    # a predictable result.
    os.chdir(os.path.join(self.config_dict['WEEWX_ROOT'],
                          self.skin_dict['SKIN_ROOT'],
                          self.skin_dict['skin']))

    # Collapse the section hierarchy into a flat dictionary of options.
    report_dict = weeutil.weeutil.accumulateLeaves(section)

    (template, dest_dir, encoding, default_binding) = self._prepGen(report_dict)

    # Get start and stop times
    default_archive = self.db_binder.get_manager(default_binding)
    start_ts = default_archive.firstGoodStamp()
    if not start_ts:
        loginf('Skipping template %s: cannot find start time' % section['template'])
        return ngen

    if gen_ts:
        # Find the archive record nearest the requested generation time;
        # max_delta bounds how far away it may be (None = unbounded).
        record = default_archive.getRecord(gen_ts,
                                           max_delta=to_int(report_dict.get('max_delta')))
        if record:
            stop_ts = record['dateTime']
        else:
            loginf('Skipping template %s; generate time %s not in database' %
                   (section['template'], timestamp_to_string(gen_ts)))
            return ngen
    else:
        stop_ts = default_archive.lastGoodStamp()

    # Get an appropriate generator function
    summarize_by = report_dict['summarize_by']
    if summarize_by in CheetahGenerator.generator_dict:
        _spangen = CheetahGenerator.generator_dict[summarize_by]
    else:
        # Just a single timespan to generate. Use a lambda expression.
        _spangen = lambda start_ts, stop_ts : [weeutil.weeutil.TimeSpan(start_ts, stop_ts)]

    # Use the generator function
    for timespan in _spangen(start_ts, stop_ts):

        # Save YYYY-MM so they can be used within the document
        if summarize_by in CheetahGenerator.generator_dict:
            timespan_start_tt = time.localtime(timespan.start)
            _yr_str = "%4d" % timespan_start_tt[0]
            if summarize_by == 'SummaryByMonth':
                _mo_str = "%02d" % timespan_start_tt[1]
                if _mo_str not in self.outputted_dict[summarize_by]:
                    self.outputted_dict[summarize_by].append("%s-%s" % (_yr_str, _mo_str))
            if summarize_by == 'SummaryByYear' and _yr_str not in self.outputted_dict[summarize_by]:
                self.outputted_dict[summarize_by].append(_yr_str)

        # figure out the filename for this template
        _filename = self._getFileName(template, timespan)
        _fullname = os.path.join(dest_dir, _filename)

        # Skip summary files outside the timespan
        if report_dict['summarize_by'] in CheetahGenerator.generator_dict \
                and os.path.exists(_fullname) \
                and not timespan.includesArchiveTime(stop_ts):
            continue

        # skip files that are fresh, but only if staleness is defined
        stale = to_int(report_dict.get('stale_age'))
        if stale is not None:
            t_now = time.time()
            try:
                last_mod = os.path.getmtime(_fullname)
                if t_now - last_mod < stale:
                    logdbg("Skip '%s': last_mod=%s age=%s stale=%s" %
                           (_filename, last_mod, t_now - last_mod, stale))
                    continue
            except os.error:
                # Target does not exist yet; generate it.
                pass

        searchList = self._getSearchList(encoding, timespan, default_binding)
        # Render into a temporary file, then rename into place so readers
        # never see a partially-written report.
        tmpname = _fullname + '.tmp'

        try:
            text = Cheetah.Template.Template(file=template,
                                             searchList=searchList,
                                             filter=encoding,
                                             filtersLib=weewx.cheetahgenerator)
            with open(tmpname, mode='w') as _file:
                print >> _file, text
            os.rename(tmpname, _fullname)
        except Exception, e:
            logerr("Generate failed with exception '%s'" % type(e))
            logerr("**** Ignoring template %s" % template)
            logerr("**** Reason: %s" % e)
            weeutil.weeutil.log_traceback("**** ")
        else:
            ngen += 1
        finally:
            # NOTE(review): the body of this 'finally' clause is truncated
            # in this excerpt — presumably it removes tmpname; confirm
            # against the full source.
def genImages(self, gen_ts):
    """Generate the images.

    The time scales will be chosen to include the given timestamp, with
    nice beginning and ending times.

    gen_ts: The time around which plots are to be generated. This will
    also be used as the bottom label in the plots. [optional. Default is
    to use the time of the last record in the database.]

    NOTE: Python 2 code (has_key(), 'except IOError, e').
    """
    t1 = time.time()
    ngen = 0

    # Loop over each time span class (day, week, month, etc.):
    for timespan in self.image_dict.sections :

        # Now, loop over all plot names in this time span class:
        for plotname in self.image_dict[timespan].sections :

            # Accumulate all options from parent nodes:
            plot_options = weeutil.weeutil.accumulateLeaves(
                self.image_dict[timespan][plotname])

            plotgen_ts = gen_ts
            if not plotgen_ts:
                # No explicit time given: fall back to the last good
                # timestamp in the bound database, then to "now".
                binding = plot_options['data_binding']
                archive = self.db_binder.get_manager(binding)
                plotgen_ts = archive.lastGoodStamp()
                if not plotgen_ts:
                    plotgen_ts = time.time()

            image_root = os.path.join(self.config_dict['WEEWX_ROOT'],
                                      plot_options['HTML_ROOT'])
            # Get the path that the image is going to be saved to:
            img_file = os.path.join(image_root, '%s.png' % plotname)

            # Check whether this plot needs to be done at all:
            ai = plot_options.as_int('aggregate_interval') if plot_options.has_key('aggregate_interval') else None
            if skipThisPlot(plotgen_ts, ai, img_file) :
                continue

            # Create the subdirectory that the image is to be put in.
            # Wrap in a try block in case it already exists.
            try:
                os.makedirs(os.path.dirname(img_file))
            except OSError:
                pass

            # Create a new instance of a time plot and start adding to it
            plot = weeplot.genplot.TimePlot(plot_options)

            # Calculate a suitable min, max time for the requested time
            # span and set it
            (minstamp, maxstamp, timeinc) = weeplot.utilities.scaletime(
                plotgen_ts - int(plot_options.get('time_length', 86400)),
                plotgen_ts)
            plot.setXScaling((minstamp, maxstamp, timeinc))

            # Set the y-scaling, using any user-supplied hints:
            plot.setYScaling(weeutil.weeutil.convertToFloat(
                plot_options.get('yscale', ['None', 'None', 'None'])))

            # Get a suitable bottom label:
            bottom_label_format = plot_options.get('bottom_label_format',
                                                   '%m/%d/%y %H:%M')
            bottom_label = time.strftime(bottom_label_format,
                                         time.localtime(plotgen_ts))
            plot.setBottomLabel(bottom_label)

            # Set day/night display
            plot.setLocation(self.stn_info.latitude_f, self.stn_info.longitude_f)
            plot.setDayNight(to_bool(plot_options.get('show_daynight', False)),
                             weeplot.utilities.tobgr(plot_options.get('daynight_day_color', '0xffffff')),
                             weeplot.utilities.tobgr(plot_options.get('daynight_night_color', '0xf0f0f0')),
                             weeplot.utilities.tobgr(plot_options.get('daynight_edge_color', '0xefefef')))

            # Loop over each line to be added to the plot.
            for line_name in self.image_dict[timespan][plotname].sections:

                # Accumulate options from parent nodes.
                line_options = weeutil.weeutil.accumulateLeaves(
                    self.image_dict[timespan][plotname][line_name])

                # See what SQL variable type to use for this line. By
                # default, use the section name.
                var_type = line_options.get('data_type', line_name)

                # Look for aggregation type:
                aggregate_type = line_options.get('aggregate_type')
                if aggregate_type in (None, '', 'None', 'none'):
                    # No aggregation specified.
                    aggregate_type = aggregate_interval = None
                else :
                    try:
                        # Aggregation specified. Get the interval.
                        aggregate_interval = line_options.as_int('aggregate_interval')
                    except KeyError:
                        syslog.syslog(syslog.LOG_ERR, "imagegenerator: aggregate interval required for aggregate type %s" % aggregate_type)
                        syslog.syslog(syslog.LOG_ERR, "imagegenerator: line type %s skipped" % var_type)
                        continue

                # Now its time to find and hit the database:
                binding = line_options['data_binding']
                archive = self.db_binder.get_manager(binding)
                (start_vec_t, stop_vec_t, data_vec_t) = \
                    archive.getSqlVectors((minstamp, maxstamp),
                                          var_type,
                                          aggregate_type=aggregate_type,
                                          aggregate_interval=aggregate_interval)

                if weewx.debug:
                    assert(len(start_vec_t) == len(stop_vec_t))

                # Do any necessary unit conversions:
                new_start_vec_t = self.converter.convert(start_vec_t)
                new_stop_vec_t = self.converter.convert(stop_vec_t)
                new_data_vec_t = self.converter.convert(data_vec_t)

                # Add a unit label. NB: all will get overwritten except the
                # last. Get the label from the configuration dictionary.
                # TODO: Allow multiple unit labels, one for each plot line?
                unit_label = line_options.get(
                    'y_label', weewx.units.get_label_string(self.formatter,
                                                            self.converter,
                                                            var_type))
                # Strip off any leading and trailing whitespace so it's
                # easy to center
                plot.setUnitLabel(unit_label.strip())

                # See if a line label has been explicitly requested:
                label = line_options.get('label')
                if not label:
                    # No explicit label. Is there a generic one?
                    # If not, then the SQL type will be used instead
                    label = self.title_dict.get(var_type, var_type)

                # See if a color has been explicitly requested.
                color = line_options.get('color')
                if color is not None:
                    color = weeplot.utilities.tobgr(color)

                # Get the line width, if explicitly requested.
                width = to_int(line_options.get('width'))

                # Get the type of plot ("bar', 'line', or 'vector')
                plot_type = line_options.get('plot_type', 'line')

                interval_vec = None

                # Some plot types require special treatments:
                if plot_type == 'vector':
                    vector_rotate_str = line_options.get('vector_rotate')
                    vector_rotate = -float(vector_rotate_str) if vector_rotate_str is not None else None
                else:
                    vector_rotate = None
                    gap_fraction = None

                if plot_type == 'bar':
                    # Bar widths are the individual aggregation intervals.
                    interval_vec = [x[1] - x[0] for x in zip(new_start_vec_t.value, new_stop_vec_t.value)]
                elif plot_type == 'line':
                    gap_fraction = to_float(line_options.get('line_gap_fraction'))
                if gap_fraction is not None:
                    if not 0 < gap_fraction < 1:
                        syslog.syslog(syslog.LOG_ERR, "imagegenerator: Gap fraction %5.3f outside range 0 to 1. Ignored." % gap_fraction)
                        gap_fraction = None

                # Get the type of line (only 'solid' or 'none' for now)
                line_type = line_options.get('line_type', 'solid')
                if line_type.strip().lower() in ['', 'none']:
                    line_type = None

                marker_type = line_options.get('marker_type')
                marker_size = to_int(line_options.get('marker_size', 8))

                # Add the line to the emerging plot:
                plot.addLine(weeplot.genplot.PlotLine(
                    new_stop_vec_t[0], new_data_vec_t[0],
                    label         = label,
                    color         = color,
                    width         = width,
                    plot_type     = plot_type,
                    line_type     = line_type,
                    marker_type   = marker_type,
                    marker_size   = marker_size,
                    bar_width     = interval_vec,
                    vector_rotate = vector_rotate,
                    gap_fraction  = gap_fraction))

            # OK, the plot is ready. Render it onto an image
            image = plot.render()

            try:
                # Now save the image
                image.save(img_file)
                ngen += 1
            except IOError, e:
                syslog.syslog(syslog.LOG_CRIT, "imagegenerator: Unable to save to file '%s' %s:" % (img_file, e))
def genImages(self, gen_ts):
    """Generate the images.

    The time scales will be chosen to include the given timestamp, with
    nice beginning and ending times.

    gen_ts: The time around which plots are to be generated. This will
    also be used as the bottom label in the plots. [optional. Default is
    to use the time of the last record in the archive database.]

    NOTE: Python 2 code (has_key()); uses the older single-archive
    database API (_getArchive / getSqlVectorsExtended).
    """
    t1 = time.time()
    ngen = 0

    # Loop over each time span class (day, week, month, etc.):
    for timespan in self.image_dict.sections :

        # Now, loop over all plot names in this time span class:
        for plotname in self.image_dict[timespan].sections :

            # Accumulate all options from parent nodes:
            plot_options = weeutil.weeutil.accumulateLeaves(self.image_dict[timespan][plotname])

            # Get the database archive
            archivedb = self._getArchive(plot_options['archive_database'])

            plotgen_ts = gen_ts
            if not plotgen_ts:
                # No explicit time given: fall back to the last good
                # timestamp in the archive, then to "now".
                plotgen_ts = archivedb.lastGoodStamp()
                if not plotgen_ts:
                    plotgen_ts = time.time()

            image_root = os.path.join(self.config_dict['WEEWX_ROOT'],
                                      plot_options['HTML_ROOT'])
            # Get the path of the file that the image is going to be saved to:
            img_file = os.path.join(image_root, '%s.png' % plotname)

            # Check whether this plot needs to be done at all:
            ai = plot_options.as_int('aggregate_interval') if plot_options.has_key('aggregate_interval') else None
            if skipThisPlot(plotgen_ts, ai, img_file) :
                continue

            # Create the subdirectory that the image is to be put in.
            # Wrap in a try block in case it already exists.
            try:
                os.makedirs(os.path.dirname(img_file))
            except:
                pass

            # Create a new instance of a time plot and start adding to it
            plot = weeplot.genplot.TimePlot(plot_options)

            # Calculate a suitable min, max time for the requested time span and set it
            (minstamp, maxstamp, timeinc) = weeplot.utilities.scaletime(
                plotgen_ts - int(plot_options.get('time_length', 86400)),
                plotgen_ts)
            plot.setXScaling((minstamp, maxstamp, timeinc))

            # Set the y-scaling, using any user-supplied hints:
            plot.setYScaling(weeutil.weeutil.convertToFloat(
                plot_options.get('yscale', ['None', 'None', 'None'])))

            # Get a suitable bottom label:
            bottom_label_format = plot_options.get('bottom_label_format',
                                                   '%m/%d/%y %H:%M')
            bottom_label = time.strftime(bottom_label_format,
                                         time.localtime(plotgen_ts))
            plot.setBottomLabel(bottom_label)

            # Set day/night display
            plot.setLocation(self.stn_info.latitude_f, self.stn_info.longitude_f)
            plot.setDayNight(to_bool(plot_options.get('show_daynight', False)),
                             weeplot.utilities.tobgr(plot_options.get('daynight_day_color', '0xffffff')),
                             weeplot.utilities.tobgr(plot_options.get('daynight_night_color', '0xf0f0f0')),
                             weeplot.utilities.tobgr(plot_options.get('daynight_edge_color', '0xefefef')))

            # Loop over each line to be added to the plot.
            for line_name in self.image_dict[timespan][plotname].sections:

                # Accumulate options from parent nodes.
                line_options = weeutil.weeutil.accumulateLeaves(self.image_dict[timespan][plotname][line_name])

                # See what SQL variable type to use for this line. By default,
                # use the section name.
                var_type = line_options.get('data_type', line_name)

                # Add a unit label. NB: all will get overwritten except the last.
                # Get the label from the configuration dictionary.
                # TODO: Allow multiple unit labels, one for each plot line?
                unit_label = line_options.get('y_label',
                                              self.unit_helper.label.get(var_type, ''))
                # Strip off any leading and trailing whitespace so it's easy to center
                plot.setUnitLabel(unit_label.strip())

                # See if a line label has been explicitly requested:
                label = line_options.get('label')
                if not label:
                    # No explicit label. Is there a generic one?
                    # If not, then the SQL type will be used instead
                    label = self.title_dict.get(var_type, var_type)

                # See if a color has been explicitly requested.
                color = line_options.get('color')
                if color is not None:
                    color = weeplot.utilities.tobgr(color)

                # Get the line width, if explicitly requested.
                width = to_int(line_options.get('width'))

                # Get the type of plot ("bar', 'line', or 'vector')
                plot_type = line_options.get('plot_type', 'line')

                if plot_type == 'vector':
                    vector_rotate_str = line_options.get('vector_rotate')
                    vector_rotate = -float(vector_rotate_str) if vector_rotate_str is not None else None
                else:
                    vector_rotate = None

                # Get the type of line ('solid' or 'none' is all that's offered now)
                line_type = line_options.get('line_type', 'solid')
                if line_type.strip().lower() in ['', 'none']:
                    line_type = None

                marker_type = line_options.get('marker_type')
                marker_size = to_int(line_options.get('marker_size'))

                # Look for aggregation type:
                aggregate_type = line_options.get('aggregate_type')
                if aggregate_type in (None, '', 'None', 'none'):
                    # No aggregation specified.
                    aggregate_type = None
                    # Set the aggregate interval to the nominal archive interval:
                    aggregate_interval = self._getArchiveInterval(archivedb)
                else :
                    try:
                        # Aggregation specified. Get the interval.
                        aggregate_interval = line_options.as_int('aggregate_interval')
                    except KeyError:
                        syslog.syslog(syslog.LOG_ERR, "genimages: aggregate interval required for aggregate type %s" % aggregate_type)
                        syslog.syslog(syslog.LOG_ERR, "genimages: line type %s skipped" % var_type)
                        continue

                # Get the fraction that defines gap size
                if plot_type == 'bar':
                    gap_fraction = line_options.get('bar_gap_fraction')
                elif plot_type == 'line':
                    gap_fraction = line_options.get('line_gap_fraction')
                else:
                    gap_fraction = None
                if gap_fraction is not None:
                    gap_fraction = float(gap_fraction)
                    if not 0 < gap_fraction < 1:
                        syslog.syslog(syslog.LOG_ERR, "genimages: gap fraction must be greater than zero and less than one. Ignored.")
                        gap_fraction = None

                # Get the time and data vectors from the database:
                (time_vec_t, data_vec_t) = archivedb.getSqlVectorsExtended(
                    var_type, minstamp, maxstamp,
                    aggregate_interval, aggregate_type)

                # Do any necessary unit conversions:
                new_time_vec_t = self.converter.convert(time_vec_t)
                new_data_vec_t = self.converter.convert(data_vec_t)

                # Add the line to the emerging plot:
                plot.addLine(weeplot.genplot.PlotLine(
                    new_time_vec_t[0], new_data_vec_t[0],
                    label         = label,
                    color         = color,
                    width         = width,
                    plot_type     = plot_type,
                    line_type     = line_type,
                    marker_type   = marker_type,
                    marker_size   = marker_size,
                    bar_width     = aggregate_interval,
                    vector_rotate = vector_rotate,
                    gap_fraction  = gap_fraction))

            # OK, the plot is ready. Render it onto an image
            image = plot.render()

            # Now save the image
            image.save(img_file)
            ngen += 1

    t2 = time.time()
    syslog.syslog(syslog.LOG_INFO, "genimages: Generated %d images for %s in %.2f seconds" % (ngen, self.skin_dict['REPORT_NAME'], t2 - t1))