def run(self):
    """Run the rsync upload for this report.

    We don't try to collect performance statistics about rsync, because
    rsync will report them for us.  Check the debug log messages.
    """
    import weeutil.rsyncupload

    try:
        # The skin may override HTML_ROOT; otherwise fall back to the
        # global [StdReport] setting.  ('in' replaces the Python-2-only
        # dict.has_key().)
        if 'HTML_ROOT' in self.skin_dict:
            html_root = self.skin_dict['HTML_ROOT']
        else:
            html_root = self.config_dict['StdReport']['HTML_ROOT']
        rsyncData = weeutil.rsyncupload.RsyncUpload(
            local_root=os.path.join(self.config_dict['WEEWX_ROOT'], html_root),
            remote_root=self.skin_dict['path'],
            server=self.skin_dict['server'],
            user=self.skin_dict.get('user'),
            port=self.skin_dict.get('port'),
            ssh_options=self.skin_dict.get('ssh_options'),
            compress=to_bool(self.skin_dict.get('compress', False)),
            delete=to_bool(self.skin_dict.get('delete', False)))
    except Exception:
        # A missing option means rsync was not configured for this skin;
        # deliberately best-effort, so just log and skip.
        syslog.syslog(syslog.LOG_DEBUG,
                      "reportengine: rsync upload not requested. Skipped.")
        return

    try:
        rsyncData.run()
    except IOError as e:
        # 'as e' is the Python-3-compatible spelling (was 'except (IOError), e').
        (cl, unused_ob, unused_tr) = sys.exc_info()
        syslog.syslog(syslog.LOG_ERR,
                      "reportengine: Caught exception %s in RsyncGenerator; %s." %
                      (cl, e))
def run(self):
    """FTP the generated files to the remote server."""
    import weeutil.ftpupload

    t1 = time.time()

    # 'in' test replaces the Python-2-only dict.has_key():
    if 'HTML_ROOT' in self.skin_dict:
        local_root = os.path.join(self.config_dict['WEEWX_ROOT'],
                                  self.skin_dict['HTML_ROOT'])
    else:
        local_root = os.path.join(self.config_dict['WEEWX_ROOT'],
                                  self.config_dict['StdReport']['HTML_ROOT'])

    try:
        ftpData = weeutil.ftpupload.FtpUpload(
            server=self.skin_dict['server'],
            user=self.skin_dict['user'],
            password=self.skin_dict['password'],
            local_root=local_root,
            remote_root=self.skin_dict['path'],
            port=int(self.skin_dict.get('port', 21)),
            name=self.skin_dict['REPORT_NAME'],
            passive=to_bool(self.skin_dict.get('passive', True)),
            max_tries=int(self.skin_dict.get('max_tries', 3)),
            secure=to_bool(self.skin_dict.get('secure_ftp', False)))
    except Exception:
        # Any missing option means FTP was not configured for this skin;
        # deliberately best-effort, so just log and skip.
        syslog.syslog(syslog.LOG_DEBUG,
                      "reportengine: FTP upload not requested. Skipped.")
        return

    try:
        N = ftpData.run()
    except (socket.timeout, socket.gaierror, ftplib.all_errors, IOError) as e:
        # 'as e' is the Python-3-compatible spelling (was 'except ..., e').
        (cl, unused_ob, unused_tr) = sys.exc_info()
        syslog.syslog(syslog.LOG_ERR,
                      "reportengine: Caught exception %s in FtpGenerator; %s." %
                      (cl, e))
        weeutil.weeutil.log_traceback(" **** ")
        return
def genSchemaOf(self, table):
    """Yield a summary of each column of the specified table.

    If the table does not exist, an exception of type
    weedb.OperationalError is raised."""
    # PRAGMA table_info yields one row per column:
    # (cid, name, type, notnull, dflt_value, pk)
    for row in self.connection.execute("PRAGMA table_info(%s);" % table):
        raw_type = row[2]
        if raw_type.upper().startswith('CHAR'):
            column_type = 'STR'
        else:
            column_type = str(raw_type).upper()
        # Invert SQLite's 'notnull' flag into a 'may be null' flag.
        yield (row[0], str(row[1]), column_type,
               not to_bool(row[3]), row[4], to_bool(row[5]))
def __init__(self, q, api_key, station=0, server_url=Windy.DEFAULT_URL,
             skip_upload=False, manager_dict=None,
             post_interval=None, max_backlog=sys.maxsize, stale=None,
             log_success=True, log_failure=True,
             timeout=60, max_tries=3, retry_wait=5):
    """Initialize the Windy posting thread.

    q: the queue from which records to post are drawn.
    api_key: the Windy API key used to authenticate uploads.
    station: the Windy station number (coerced to int).
    server_url: endpoint to post to; defaults to Windy.DEFAULT_URL.
    skip_upload: if true, prepare posts but do not actually upload.
    Remaining arguments are passed through to the base posting thread.
    """
    super(WindyThread, self).__init__(q,
                                      protocol_name='Windy',
                                      manager_dict=manager_dict,
                                      post_interval=post_interval,
                                      max_backlog=max_backlog,
                                      stale=stale,
                                      log_success=log_success,
                                      log_failure=log_failure,
                                      max_tries=max_tries,
                                      timeout=timeout,
                                      retry_wait=retry_wait)
    self.api_key = api_key
    # Station identifier; to_int tolerates string configuration values.
    self.station = to_int(station)
    self.server_url = server_url
    loginf("Data will be uploaded to %s" % self.server_url)
    # When set, uploads are skipped (useful for debugging).
    self.skip_upload = to_bool(skip_upload)
def run(self):
    """Back up the weewx database and reports to an AWS S3 bucket."""
    import user.s3backup

    # determine how much logging is desired
    log_success = to_bool(self.skin_dict.get('log_success', True))

    t1 = time.time()
    try:
        S3_backup = user.s3backup.S3Backup(
            bucket=self.skin_dict['S3_BUCKET'],
            profile=self.skin_dict['AWS_Profile'],
            region=self.skin_dict['AWS_Region'],
            weewx_root=self.config_dict['WEEWX_ROOT'],
            sqlite_root=self.config_dict['DatabaseTypes']['SQLite']['SQLITE_ROOT'],
            database_name=self.config_dict['Databases']['archive_sqlite']['database_name'])
    except KeyError as e:
        # Fixed format string: the original used a bare "%" (no 's'),
        # which raises ValueError instead of logging the missing key.
        log.error("S3BackupGenerator: S3 Backup not requested. "
                  "Skipped with error: %s" % e)
        return

    try:
        n = S3_backup.run()
    except Exception as e:
        # The original 'except () as e:' matched nothing, so any upload
        # failure escaped unlogged.  Catch broadly here so a failed backup
        # is logged rather than fatal.
        log.error("S3BackupGenerator: Caught exception: %s" % e)
        return

    if log_success:
        t2 = time.time()
        log.info("S3BackupGenerator: AWS-S3 copied files to S3 in %d attempts "
                 "which took %0.2f seconds" % ((n + 1), (t2 - t1)))
def __init__(self, queue, id, key, manager_dict,
             server_url=_SERVER_URL, skip_upload=False,
             post_interval=600, max_backlog=MAXSIZE, stale=None,
             log_success=True, log_failure=True,
             timeout=60, max_tries=3, retry_wait=5):
    """Initialize the WeatherCloud posting thread.

    queue: the queue from which records to post are drawn.
    id: the WeatherCloud device id.  (NOTE: shadows the builtin 'id',
        but the name is part of the public signature, so it is kept.)
    key: the WeatherCloud device key.
    manager_dict: database manager options for the base thread.
    server_url: endpoint to post to; defaults to _SERVER_URL.
    skip_upload: if true, prepare posts but do not actually upload.
    Remaining arguments are passed through to the base posting thread.
    """
    super(WeatherCloudThread, self).__init__(queue,
                                             protocol_name='WeatherCloud',
                                             manager_dict=manager_dict,
                                             post_interval=post_interval,
                                             max_backlog=max_backlog,
                                             stale=stale,
                                             log_success=log_success,
                                             log_failure=log_failure,
                                             max_tries=max_tries,
                                             timeout=timeout,
                                             retry_wait=retry_wait)
    self.id = id
    self.key = key
    self.server_url = server_url
    # When set, uploads are skipped (useful for debugging).
    self.skip_upload = to_bool(skip_upload)
def setup(self):
    """Cache config sections and unit helpers used during image generation."""
    # [ImageGenerator] section of the skin configuration.
    self.image_dict = self.skin_dict['ImageGenerator']
    # Generic observation labels for plot titles; empty if absent.
    self.title_dict = self.skin_dict.get('Labels', {}).get('Generic', {})
    # Formatter/Converter built from the skin's unit settings.
    self.formatter = weewx.units.Formatter.fromSkinDict(self.skin_dict)
    self.converter = weewx.units.Converter.fromSkinDict(self.skin_dict)
    # determine how much logging is desired
    self.log_success = to_bool(self.image_dict.get('log_success', True))
def __init__(self, config_dict):
    """Initialize an instance of StdEngine.

    config_dict: The configuration dictionary.
    """
    # Install a default socket timeout so hung FTP or HTTP connections
    # cannot stall the engine forever.
    timeout = int(config_dict.get('socket_timeout', 20))
    socket.setdefaulttimeout(timeout)

    # How often to force garbage collection; default is every 3 hours.
    self.gc_interval = int(config_dict.get('gc_interval', 3 * 3600))

    # Event logging can be very verbose, so it is off by default.
    self.log_events = to_bool(config_dict.get('log_events', False))

    # Maps event types to the callbacks bound to them.
    self.callbacks = {}

    # Will hold the device-driver instance once the station is set up.
    self.console = None

    # Bring up the device driver:
    self.setupStation(config_dict)

    # Station metadata, built from the console plus [Station] options.
    self.stn_info = weewx.station.StationInfo(self.console,
                                              **config_dict['Station'])

    # Database binder shared by the services.
    self.db_binder = weewx.manager.DBBinder(config_dict)

    # Instantiated services, in load order.
    self.service_obj = []

    # Finally, load the services themselves.
    self.loadServices(config_dict)
def __init__(self, config_dict, fix_config_dict):
    """A generic initialisation."""
    # Keep references to both configuration dictionaries for later use.
    self.config_dict = config_dict
    self.fix_config_dict = fix_config_dict
    # The name identifying this fix.
    self.name = fix_config_dict['name']
    # A dry run makes no changes; default to the safe option.
    self.dry_run = to_bool(fix_config_dict.get('dry_run', True))
    # Use an explicit binding if one was given; otherwise fall back to
    # whatever StdArchive uses (default 'wx_binding').
    if 'binding' in fix_config_dict:
        self.binding = fix_config_dict['binding']
    elif 'StdArchive' in config_dict:
        self.binding = config_dict['StdArchive'].get('data_binding',
                                                     'wx_binding')
    else:
        self.binding = 'wx_binding'
    # Database manager for the archive we are to work on.
    self.dbm = weewx.manager.open_manager_with_config(config_dict,
                                                      self.binding)
def __init__(self, config_dict):
    """Initialize an instance of StdEngine.

    config_dict: The configuration dictionary.
    """
    # Install a default socket timeout so hung FTP or HTTP connections
    # cannot stall the engine forever.
    socket.setdefaulttimeout(int(config_dict.get('socket_timeout', 20)))

    # How often to force garbage collection; default is every 3 hours.
    self.gc_interval = int(config_dict.get('gc_interval', 3 * 3600))

    # Event logging can be very verbose, so it is off by default.
    self.log_events = to_bool(config_dict.get('log_events', False))

    # Maps event types to the callbacks bound to them.
    self.callbacks = {}

    # Bring up the weather station hardware:
    self.setupStation(config_dict)

    # Hook for performing any chores before loading the services:
    self.preLoadServices(config_dict)

    # Load the services:
    self.loadServices(config_dict)

    # Another hook, for after the services load.
    self.postLoadServices(config_dict)
def __init__(self, host='localhost', user='', password='', database_name='',
             port=3306, engine=DEFAULT_ENGINE, autocommit=True, **kwargs):
    """Initialize an instance of Connection.

    Parameters:

        host: IP or hostname with the mysql database (required)
        user: User name (required)
        password: The password for the username (required)
        database_name: The database to be used. (required)
        port: Its port number (optional; default is 3306)
        engine: The MySQL database engine to use (optional; default is 'INNODB')
        autocommit: If True, autocommit is enabled (default is True)
        kwargs: Any extra arguments you may wish to pass on to MySQL connect
          statement. See the file MySQLdb/connections.py for a list (optional).
    """
    # Open the raw MySQLdb connection; port may arrive as a string from
    # the config file, hence the int() coercion.
    connection = MySQLdb.connect(host=host, port=int(port), user=user,
                                 passwd=password, db=database_name, **kwargs)

    # Hand the raw connection to the generic weedb wrapper.
    weedb.Connection.__init__(self, connection, database_name, 'mysql')

    # Set the storage engine to be used
    set_engine(self.connection, engine)

    # Set the transaction isolation level.
    self.connection.query("SET TRANSACTION ISOLATION LEVEL READ COMMITTED")
    self.connection.autocommit(to_bool(autocommit))
def genSchemaOf(self, table):
    """Return a summary of the schema of the specified table.

    If the table does not exist, an exception of type
    weedb.OperationalError is raised."""
    # Get a cursor directly from MySQL:
    cursor = self.connection.cursor()
    try:
        # MySQL throws an exception if you try to show the columns of a
        # non-existing table
        try:
            cursor.execute("""SHOW COLUMNS IN %s;""" % table)
        except _mysql_exceptions.ProgrammingError as e:
            # 'as e' is the Python-3-compatible spelling (was 'except ..., e').
            # Table does not exist. Change the exception type:
            raise weedb.OperationalError(e)
        irow = 0
        while True:
            row = cursor.fetchone()
            if row is None:
                break
            # Append this column to the list of columns, normalizing the
            # MySQL type name to the weedb vocabulary.
            colname = str(row[0])
            if row[1].upper() == 'DOUBLE':
                coltype = 'REAL'
            elif row[1].upper().startswith('INT'):
                coltype = 'INTEGER'
            elif row[1].upper().startswith('CHAR'):
                coltype = 'STR'
            else:
                coltype = str(row[1]).upper()
            is_primary = True if row[3] == 'PRI' else False
            yield (irow, colname, coltype, to_bool(row[2]), row[4], is_primary)
            irow += 1
    finally:
        # Close the cursor even if the caller abandons the generator early
        # (the original leaked it; matches the other genSchemaOf variants).
        cursor.close()
def run(self):
    """Main entry point for file generation using Cheetah Templates."""
    started = time.time()
    self.setup()

    # Work on a copy of the skin dictionary, since it will be modified.
    gen_dict = configobj.ConfigObj(self.skin_dict.dict())

    # Options normally live in [CheetahGenerator], but accept the legacy
    # [FileGenerator] section when only that one is present.
    if "FileGenerator" in gen_dict and "CheetahGenerator" not in gen_dict:
        section_name = "FileGenerator"
    else:
        section_name = "CheetahGenerator"

    # The default summary time span is 'None'.
    gen_dict[section_name]['summarize_by'] = 'None'

    # determine how much logging is desired
    log_success = to_bool(gen_dict[section_name].get('log_success', True))

    # configure the search list extensions
    self.initExtensions(gen_dict[section_name])

    # Generate any templates in the given dictionary:
    ngen = self.generate(gen_dict[section_name], self.gen_ts)

    self.teardown()

    elapsed_time = time.time() - started
    if log_success:
        loginf("Generated %d files for report %s in %.2f seconds"
               % (ngen, self.skin_dict['REPORT_NAME'], elapsed_time))
def genSchemaOf(self, table):
    """Return a summary of the schema of the specified table.

    If the table does not exist, an exception of type
    weedb.OperationalError is raised."""
    # Use a raw MySQL cursor; make sure it is closed even if the caller
    # abandons the generator early.
    cursor = self.connection.cursor()
    try:
        # A non-existent table raises a MySQL ProgrammingError here, which
        # the guard decorator converts to weedb.OperationalError.
        cursor.execute("""SHOW COLUMNS IN %s;""" % table)
        # Iterate until fetchone() signals exhaustion with None.
        for irow, row in enumerate(iter(cursor.fetchone, None)):
            colname = str(row[0])
            mysql_type = row[1].upper()
            # Normalize the MySQL type name to the weedb vocabulary.
            if mysql_type == 'DOUBLE':
                coltype = 'REAL'
            elif mysql_type.startswith('INT'):
                coltype = 'INTEGER'
            elif mysql_type.startswith('CHAR'):
                coltype = 'STR'
            else:
                coltype = str(row[1]).upper()
            is_primary = (row[3] == 'PRI')
            can_be_null = False if row[2] == '' else to_bool(row[2])
            yield (irow, colname, coltype, can_be_null, row[4], is_primary)
    finally:
        cursor.close()
def run(self):
    """Run the rsync upload for this report.

    We don't try to collect performance statistics about rsync, because
    rsync will report them for us.  Check the debug log messages.
    """
    import weeutil.rsyncupload

    try:
        # The skin may override HTML_ROOT; otherwise fall back to the
        # global [StdReport] setting.  ('in' replaces the Python-2-only
        # dict.has_key().)
        if 'HTML_ROOT' in self.skin_dict:
            html_root = self.skin_dict['HTML_ROOT']
        else:
            html_root = self.config_dict['StdReport']['HTML_ROOT']
        rsyncData = weeutil.rsyncupload.RsyncUpload(
            local_root=os.path.join(self.config_dict['WEEWX_ROOT'], html_root),
            remote_root=self.skin_dict['path'],
            server=self.skin_dict['server'],
            user=self.skin_dict.get('user'),
            port=self.skin_dict.get('port'),
            delete=to_bool(self.skin_dict.get('delete', False)))
    except Exception:
        # A missing option means rsync was not configured for this skin;
        # deliberately best-effort, so just log and skip.
        syslog.syslog(
            syslog.LOG_DEBUG,
            "reportengine: rsync upload not requested. Skipped.")
        return

    try:
        rsyncData.run()
    except IOError as e:
        # 'as e' is the Python-3-compatible spelling (was 'except (IOError), e').
        (cl, unused_ob, unused_tr) = sys.exc_info()
        syslog.syslog(
            syslog.LOG_ERR,
            "reportengine: Caught exception %s in RsyncGenerator; %s." %
            (cl, e))
def genSchemaOf(self, table):
    """Return a summary of the schema of the specified table.

    If the table does not exist, an exception of type
    weedb.OperationalError is raised."""
    # Use a raw MySQL cursor, closed in the finally clause so it is
    # released even if the caller abandons the generator early.
    cursor = self.connection.cursor()
    try:
        # A non-existent table raises a MySQL ProgrammingError here, which
        # the guard decorator converts to weedb.OperationalError.
        cursor.execute("""SHOW COLUMNS IN %s;""" % table)
        irow = 0
        row = cursor.fetchone()
        while row is not None:
            colname = str(row[0])
            type_name = row[1].upper()
            # Normalize the MySQL type name to the weedb vocabulary.
            if type_name == 'DOUBLE':
                coltype = 'REAL'
            elif type_name.startswith('INT'):
                coltype = 'INTEGER'
            elif type_name.startswith('CHAR'):
                coltype = 'STR'
            else:
                coltype = str(row[1]).upper()
            is_primary = (row[3] == 'PRI')
            can_be_null = False if row[2] == '' else to_bool(row[2])
            yield (irow, colname, coltype, can_be_null, row[4], is_primary)
            irow += 1
            row = cursor.fetchone()
    finally:
        cursor.close()
def run(self):
    """Upload the generated files to a remote server via SFTP."""
    # determine how much logging is desired
    log_success = to_bool(self.skin_dict.get('log_success', True))
    t1 = time.time()

    # The skin may override HTML_ROOT; otherwise use the global
    # [StdReport] setting (default 'public_html').
    if 'HTML_ROOT' in self.skin_dict:
        local_root = os.path.join(self.config_dict['WEEWX_ROOT'],
                                  self.skin_dict['HTML_ROOT'])
    else:
        local_root = os.path.join(
            self.config_dict['WEEWX_ROOT'],
            self.config_dict.get('StdReport', {}).get('HTML_ROOT',
                                                      'public_html'))

    logdbg("running SFTPGenerator %s" % VERSION)
    try:
        uploader = SFTPUploader(
            server=self.skin_dict['server'],
            user=self.skin_dict['user'],
            password=self.skin_dict['password'],
            # Use the local_root computed above.  The original read the
            # non-existent skin option self.skin_dict['local_root'], which
            # left the computed value dead and raised KeyError for every
            # correctly configured skin.
            local_root=local_root,
            remote_root=self.skin_dict['path'],
            port=int(self.skin_dict.get('port', 2222)),
            name=self.skin_dict.get('REPORT_NAME', 'SFTP'),
            max_tries=int(self.skin_dict.get('max_tries', 3)),
            debug=int(self.skin_dict.get('debug', 0)))
    except KeyError as e:
        # 'as e' is the Python-3-compatible spelling (was 'except KeyError, e').
        loginf("upload not possible: missing parameter %s" % e,
               "sftpgenerator")
        return
def run(self): copy_dict = self.skin_dict['CopyGenerator'] # determine how much logging is desired log_success = to_bool(copy_dict.get('log_success', True)) copy_list = [] if self.first_run: # Get the list of files to be copied only once, at the first invocation of # the generator. Wrap in a try block in case the list does not exist. try: copy_list += weeutil.weeutil.option_as_list(copy_dict['copy_once']) except KeyError: pass # Get the list of files to be copied everytime. Again, wrap in a try block. try: copy_list += weeutil.weeutil.option_as_list(copy_dict['copy_always']) except KeyError: pass # Change directory to the skin subdirectory: os.chdir(os.path.join(self.config_dict['WEEWX_ROOT'], self.skin_dict['SKIN_ROOT'], self.skin_dict['skin'])) # Figure out the destination of the files html_dest_dir = os.path.join(self.config_dict['WEEWX_ROOT'], self.skin_dict['HTML_ROOT'])
def __init__(self, engine, config_dict):
    """Initialize the archive service.

    engine: the running engine instance.
    config_dict: the full weewx configuration dictionary.
    """
    super(StdArchive, self).__init__(engine, config_dict)

    # Extract the various options from the config file. If it's missing, fill in with defaults:
    if 'StdArchive' in config_dict:
        self.data_binding = config_dict['StdArchive'].get('data_binding', 'wx_binding')
        self.record_generation = config_dict['StdArchive'].get('record_generation',
                                                               'hardware').lower()
        self.archive_delay = to_int(config_dict['StdArchive'].get('archive_delay', 15))
        software_interval = to_int(config_dict['StdArchive'].get('archive_interval', 300))
        self.loop_hilo = to_bool(config_dict['StdArchive'].get('loop_hilo', True))
    else:
        self.data_binding = 'wx_binding'
        self.record_generation = 'hardware'
        self.archive_delay = 15
        software_interval = 300
        self.loop_hilo = True

    syslog.syslog(syslog.LOG_INFO, "engine: Archive will use data binding %s"
                  % self.data_binding)
    syslog.syslog(syslog.LOG_INFO, "engine: Record generation will be attempted in '%s'"
                  % (self.record_generation,))

    # If the station supports a hardware archive interval, use that.
    # Warn if it is different than what is in config.
    ival_msg = ''
    try:
        if software_interval != self.engine.console.archive_interval:
            syslog.syslog(syslog.LOG_ERR,
                          "engine: The archive interval in the"
                          " configuration file (%d) does not match the"
                          " station hardware interval (%d)." %
                          (software_interval,
                           self.engine.console.archive_interval))
        self.archive_interval = self.engine.console.archive_interval
        ival_msg = "(specified by hardware)"
    except NotImplementedError:
        # Drivers with no hardware interval raise NotImplementedError;
        # fall back to the configured software interval.
        self.archive_interval = software_interval
        ival_msg = "(specified in weewx configuration)"
    syslog.syslog(syslog.LOG_INFO, "engine: Using archive interval of %d seconds %s"
                  % (self.archive_interval, ival_msg))

    if self.archive_delay <= 0:
        raise weewx.ViolatedPrecondition("Archive delay (%.1f) must be greater than zero."
                                         % (self.archive_delay,))
    if self.archive_delay >= self.archive_interval / 2:
        # An excessive delay eats into the following archive period.
        syslog.syslog(syslog.LOG_WARNING,
                      "engine: Archive delay (%d) is unusually long" %
                      (self.archive_delay,))

    syslog.syslog(syslog.LOG_DEBUG, "engine: Use LOOP data in hi/low calculations: %d" %
                  (self.loop_hilo,))

    self.setup_database(config_dict)

    # Subscribe to the engine events this service handles.
    self.bind(weewx.STARTUP, self.startup)
    self.bind(weewx.PRE_LOOP, self.pre_loop)
    self.bind(weewx.POST_LOOP, self.post_loop)
    self.bind(weewx.CHECK_LOOP, self.check_loop)
    self.bind(weewx.NEW_LOOP_PACKET, self.new_loop_packet)
    self.bind(weewx.NEW_ARCHIVE_RECORD, self.new_archive_record)
def run(self):
    """FTP the generated files to the remote server."""
    import weeutil.ftpupload

    # determine how much logging is desired
    log_success = to_bool(search_up(self.skin_dict, 'log_success', True))
    start = time.time()

    try:
        # The skin may override HTML_ROOT; otherwise use the global
        # [StdReport] setting.
        html_root = self.skin_dict.get(
            'HTML_ROOT', self.config_dict['StdReport']['HTML_ROOT'])
        local_root = os.path.join(self.config_dict['WEEWX_ROOT'], html_root)
        ftp_data = weeutil.ftpupload.FtpUpload(
            server=self.skin_dict['server'],
            user=self.skin_dict['user'],
            password=self.skin_dict['password'],
            local_root=local_root,
            remote_root=self.skin_dict['path'],
            port=int(self.skin_dict.get('port', 21)),
            name=self.skin_dict['REPORT_NAME'],
            passive=to_bool(self.skin_dict.get('passive', True)),
            max_tries=int(self.skin_dict.get('max_tries', 3)),
            secure=to_bool(self.skin_dict.get('secure_ftp', False)),
            debug=int(self.skin_dict.get('debug', 0)),
            secure_data=to_bool(self.skin_dict.get('secure_data', True)))
    except KeyError:
        # A missing required option means FTP was not configured.
        syslog.syslog(syslog.LOG_DEBUG,
                      "ftpgenerator: FTP upload not requested. Skipped.")
        return

    try:
        n = ftp_data.run()
    except (socket.timeout, socket.gaierror, ftplib.all_errors, IOError) as e:
        (cl, unused_ob, unused_tr) = sys.exc_info()
        syslog.syslog(syslog.LOG_ERR,
                      "ftpgenerator: Caught exception %s: %s" % (cl, e))
        weeutil.weeutil.log_traceback(" **** ")
        return

    if log_success:
        elapsed = time.time() - start
        syslog.syslog(syslog.LOG_INFO,
                      "ftpgenerator: ftp'd %d files in %0.2f seconds"
                      % (n, elapsed))
def run(self):
    """This is where the actual work gets done.

    Runs through the list of reports."""
    if self.gen_ts:
        syslog.syslog(
            syslog.LOG_DEBUG,
            "reportengine: Running reports for time %s"
            % weeutil.weeutil.timestamp_to_string(self.gen_ts))
    else:
        syslog.syslog(
            syslog.LOG_DEBUG, "reportengine: "
            "Running reports for latest time in the database.")

    # Iterate over each requested report
    for report in self.config_dict['StdReport'].sections:
        # See if this report is disabled
        enabled = to_bool(self.config_dict['StdReport'][report].get(
            'enable', True))
        if not enabled:
            syslog.syslog(syslog.LOG_DEBUG,
                          "reportengine: Skipping report %s" % report)
            continue

        syslog.syslog(syslog.LOG_DEBUG,
                      "reportengine: Running report %s" % report)

        # Figure out where the configuration file is for the skin used for
        # this report:
        skin_config_path = os.path.join(
            self.config_dict['WEEWX_ROOT'],
            self.config_dict['StdReport']['SKIN_ROOT'],
            self.config_dict['StdReport'][report].get('skin', 'Standard'),
            'skin.conf')

        # Retrieve the configuration dictionary for the skin. Wrap it in
        # a try block in case we fail
        try:
            skin_dict = configobj.ConfigObj(skin_config_path,
                                            file_error=True)
            syslog.syslog(
                syslog.LOG_DEBUG,
                "reportengine: Found configuration file %s for report %s"
                % (skin_config_path, report))
        except IOError as e:
            # 'as e' is the Python-3-compatible spelling (was 'except IOError, e').
            syslog.syslog(
                syslog.LOG_ERR, "reportengine: "
                "Cannot read skin configuration file %s for report %s: %s"
                % (skin_config_path, report, e))
            syslog.syslog(syslog.LOG_ERR, " **** Report ignored")
            continue
        except SyntaxError as e:
            syslog.syslog(
                syslog.LOG_ERR, "reportengine: "
                "Failed to read skin configuration file %s for report %s: %s"
                % (skin_config_path, report, e))
            syslog.syslog(syslog.LOG_ERR, " **** Report ignored")
            continue
def run(self):
    """Copy the skin's static files into the HTML output directory."""
    copy_dict = self.skin_dict['CopyGenerator']

    # determine how much logging is desired
    log_success = to_bool(search_up(copy_dict, 'log_success', True))

    copy_list = []

    if self.first_run:
        # Files copied only on the generator's first invocation.  The
        # option may be absent, so guard against KeyError.
        try:
            copy_list += weeutil.weeutil.option_as_list(
                copy_dict['copy_once'])
        except KeyError:
            pass

    # Files copied on every invocation; again the option may be absent.
    try:
        copy_list += weeutil.weeutil.option_as_list(
            copy_dict['copy_always'])
    except KeyError:
        pass

    # Work relative to the skin's own subdirectory:
    os.chdir(os.path.join(self.config_dict['WEEWX_ROOT'],
                          self.skin_dict['SKIN_ROOT'],
                          self.skin_dict['skin']))

    # Destination root for the copied files:
    html_dest_dir = os.path.join(self.config_dict['WEEWX_ROOT'],
                                 self.skin_dict['HTML_ROOT'])

    # The copy list can contain wildcards; expand each pattern and copy
    # every match, counting as we go.
    ncopy = 0
    for pattern in copy_list:
        for source_path in glob.glob(pattern):
            # Preserve any relative subdirectory under the destination:
            dest_dir = os.path.join(html_dest_dir,
                                    os.path.dirname(source_path))
            # Create the destination directory, ignoring "already exists":
            try:
                os.makedirs(dest_dir)
            except OSError:
                pass
            # shutil.copy does not preserve the modification time, so the
            # copy looks like a new file, causing it to be (for example)
            # ftp'd to the server:
            shutil.copy(source_path, dest_dir)
            ncopy += 1

    if log_success:
        syslog.syslog(syslog.LOG_INFO,
                      "copygenerator: copied %d files to %s"
                      % (ncopy, html_dest_dir))
def __init__(self, queue, database, username=None, password=None,
             dbadmin_username=None, dbadmin_password=None,
             line_format='single-line', tags=None, unit_system=None,
             augment_record=True, inputs=dict(), obs_to_upload='all',
             append_units_label=True, server_url=_DEFAULT_SERVER_URL,
             skip_upload=False, manager_dict=None,
             post_interval=None, max_backlog=sys.maxint, stale=None,
             log_success=True, log_failure=True,
             timeout=60, max_tries=3, retry_wait=5):
    """Initialize the Influx posting thread and ensure the database exists.

    queue: the queue from which records to post are drawn.
    database: name of the InfluxDB database to write to.
    username/password: credentials used for data writes.
    dbadmin_username/dbadmin_password: admin credentials, used (when
        given) only for the CREATE DATABASE request.
    Remaining arguments are passed through to the base posting thread or
    stored for use when formatting measurements.

    NOTE(review): 'inputs=dict()' is a mutable default argument — safe
    only if no caller mutates it; confirm before changing.
    """
    super(InfluxThread, self).__init__(queue,
                                       protocol_name='Influx',
                                       manager_dict=manager_dict,
                                       post_interval=post_interval,
                                       max_backlog=max_backlog,
                                       stale=stale,
                                       log_success=log_success,
                                       log_failure=log_failure,
                                       max_tries=max_tries,
                                       timeout=timeout,
                                       retry_wait=retry_wait)
    self.database = database
    self.username = username
    self.password = password
    self.tags = tags
    # 'all' means upload every observation; otherwise only those in inputs.
    self.upload_all = True if obs_to_upload.lower() == 'all' else False
    self.append_units_label = append_units_label
    self.inputs = inputs
    self.server_url = server_url
    self.skip_upload = to_bool(skip_upload)
    self.unit_system = unit_system
    self.augment_record = augment_record
    # Cache of compiled per-observation templates, filled lazily.
    self.templates = dict()
    self.line_format = line_format

    # ensure that the database exists
    qstr = urllib.urlencode({'q': 'CREATE DATABASE %s' % self.database})
    url = '%s/query?%s' % (self.server_url, qstr)
    req = urllib2.Request(url)
    req.add_header("User-Agent", "weewx/%s" % weewx.__version__)
    # Prefer the admin credentials for database creation; fall back to
    # the ordinary write credentials.
    uname = None
    pword = None
    if dbadmin_username is not None:
        uname = dbadmin_username
        pword = dbadmin_password
    elif username is not None:
        uname = username
        pword = password
    if uname is not None:
        # HTTP basic auth header (strip the trailing newline that
        # base64.encodestring appends).
        b64s = base64.encodestring(
            '%s:%s' % (uname, pword)).replace('\n', '')
        req.add_header("Authorization", "Basic %s" % b64s)
    try:
        self.post_request(req)
    except (urllib2.URLError, socket.error,
            httplib.BadStatusLine, httplib.IncompleteRead), e:
        # Creation failure is logged but not fatal: the database may
        # already exist, or writes may still succeed later.
        logerr("create database failed: %s" % e)
def run(self):
    """FTP the generated files to the remote server."""
    import weeutil.ftpupload

    # determine how much logging is desired
    log_success = to_bool(self.skin_dict.get('log_success', True))
    t1 = time.time()

    # 'in' test replaces the Python-2-only dict.has_key():
    if 'HTML_ROOT' in self.skin_dict:
        local_root = os.path.join(self.config_dict['WEEWX_ROOT'],
                                  self.skin_dict['HTML_ROOT'])
    else:
        local_root = os.path.join(
            self.config_dict['WEEWX_ROOT'],
            self.config_dict['StdReport']['HTML_ROOT'])

    try:
        ftpData = weeutil.ftpupload.FtpUpload(
            server=self.skin_dict['server'],
            user=self.skin_dict['user'],
            password=self.skin_dict['password'],
            local_root=local_root,
            remote_root=self.skin_dict['path'],
            port=int(self.skin_dict.get('port', 21)),
            name=self.skin_dict['REPORT_NAME'],
            passive=to_bool(self.skin_dict.get('passive', True)),
            max_tries=int(self.skin_dict.get('max_tries', 3)),
            secure=to_bool(self.skin_dict.get('secure_ftp', False)),
            debug=int(self.skin_dict.get('debug', 0)))
    except Exception:
        # Any missing option means FTP was not configured for this skin;
        # deliberately best-effort, so just log and skip.
        syslog.syslog(syslog.LOG_DEBUG,
                      "reportengine: FTP upload not requested. Skipped.")
        return

    try:
        N = ftpData.run()
    except (socket.timeout, socket.gaierror, ftplib.all_errors, IOError) as e:
        # 'as e' is the Python-3-compatible spelling (was 'except ..., e').
        (cl, unused_ob, unused_tr) = sys.exc_info()
        syslog.syslog(
            syslog.LOG_ERR,
            "reportengine: Caught exception %s in FtpGenerator; %s." %
            (cl, e))
        weeutil.weeutil.log_traceback(" **** ")
        return
def _init_topic_dict(topic, site_dict, topic_dict, payload_type=None):
    """Populate topic_dict with per-topic options, falling back to the
    site-wide value for any option the topic does not set itself."""
    # Hoist the repeated per-topic lookup.
    tdict = site_dict['topics'][topic]

    topic_dict['skip_upload'] = tdict.get(
        'skip_upload', site_dict.get('skip_upload', False))
    topic_dict['binding'] = tdict.get(
        'binding', site_dict.get('binding', 'archive'))
    # An explicit payload_type overrides any configured 'type'.
    if payload_type is None:
        topic_dict['type'] = tdict.get('type', site_dict.get('type', 'json'))
    else:
        topic_dict['type'] = payload_type
    topic_dict['append_units_label'] = to_bool(tdict.get(
        'append_units_label', site_dict.get('append_units_label', True)))
    topic_dict['conversion_type'] = tdict.get(
        'conversion_type', site_dict.get('conversion_type', 'string'))
    topic_dict['augment_record'] = to_bool(tdict.get(
        'augment_record', site_dict.get('augment_record', True)))
    usn = tdict.get('unit_system', site_dict.get('unit_system', None))
    if usn is not None:
        topic_dict['unit_system'] = weewx.units.unit_constants[usn]
        loginf("for %s: desired unit system is %s" % (topic, usn))
    # 'all' means every observation is uploaded.
    topic_dict['upload_all'] = bool(tdict.get(
        'obs_to_upload', site_dict.get('obs_to_upload', 'all')).lower() == 'all')
    topic_dict['retain'] = to_bool(tdict.get(
        'retain', site_dict.get('retain', False)))
    topic_dict['qos'] = to_int(tdict.get('qos', site_dict.get('qos', 0)))
    topic_dict['inputs'] = dict(tdict.get(
        'inputs', site_dict.get('inputs', {})))
    topic_dict['templates'] = dict()
    loginf("for %s binding to %s" % (topic, topic_dict['binding']))
def __init__(self, **stn_dict):
    """Initialize the ObserverIP driver from the station config dict."""
    loginf("version is %s" % DRIVER_VERSION)
    self.xferfile = stn_dict['xferfile']
    self.poll_interval = float(stn_dict.get('poll_interval', 10))
    self.dup_interval = float(stn_dict.get('dup_interval', 5))
    self.max_tries = int(stn_dict.get('max_tries', 5))
    self.retry_wait = int(stn_dict.get('retry_wait', 2))
    self.mode = stn_dict.get('mode', 'direct')
    self.check_calibration = to_bool(
        stn_dict.get('check_calibration', False))
    self.set_calibration = to_bool(stn_dict.get('set_calibration', False))
    self.last_rain_total = None
    self.last_datetime = 0

    if self.mode == 'direct':
        self._station = ObserverIPStation(**stn_dict)
        if self.chkunits(ObserverIPDriver.EXPECTED_UNITS):
            # The original called logerr() with names (i, calibdata,
            # stcalib) that are undefined in this scope, so it raised
            # NameError instead of the intended error.  Raise directly,
            # matching the 'wu' branch below.
            raise Exception("Station units not set correctly")
        if self._station.version() in ObserverIPDriver.SENSOR_MAP:
            self.map = ObserverIPDriver.SENSOR_MAP[self._station.version()]
        else:
            loginf("Unknown firmware version: %s" % self._station.version())
            self.map = ObserverIPDriver.SENSOR_MAP['default']
    else:
        self.map = ObserverIPDriver.SENSOR_MAP['wu']
        if self.check_calibration:
            self._station = ObserverIPStation(**stn_dict)
            if self.chkunits(ObserverIPDriver.EXPECTED_UNITS):
                raise Exception("Station units not set correctly")

    if 'calibration' in stn_dict and self.check_calibration:
        if self.chkcalib(stn_dict['calibration']):
            if self.set_calibration:
                # Try to push the desired calibration, then verify it took.
                self._station.setcalibration(stn_dict['calibration'])
                if self.chkcalib(stn_dict['calibration']):
                    raise Exception("Setting calibration unsuccessful")
            else:
                raise Exception("calibration error")

    loginf("polling interval is %s" % self.poll_interval)
def __init__(self, engine, config_dict):
    """Initialize the archive service.

    engine: the running engine instance.
    config_dict: the full weewx configuration dictionary.
    """
    super(StdArchive, self).__init__(engine, config_dict)

    # Extract the various options from the config file. If it's missing, fill in with defaults:
    if 'StdArchive' in config_dict:
        self.data_binding = config_dict['StdArchive'].get('data_binding', 'wx_binding')
        self.record_generation = config_dict['StdArchive'].get('record_generation',
                                                               'hardware').lower()
        self.archive_delay = to_int(config_dict['StdArchive'].get('archive_delay', 15))
        software_interval = to_int(config_dict['StdArchive'].get('archive_interval', 300))
        self.loop_hilo = to_bool(config_dict['StdArchive'].get('loop_hilo', True))
    else:
        self.data_binding = 'wx_binding'
        self.record_generation = 'hardware'
        self.archive_delay = 15
        software_interval = 300
        self.loop_hilo = True

    syslog.syslog(syslog.LOG_INFO, "engine: Archive will use data binding %s"
                  % self.data_binding)
    syslog.syslog(syslog.LOG_INFO, "engine: Record generation will be attempted in '%s'"
                  % (self.record_generation,))

    # If the station supports a hardware archive interval, use that.
    # Warn if it is different than what is in config.
    ival_msg = ''
    try:
        if software_interval != self.engine.console.archive_interval:
            syslog.syslog(syslog.LOG_ERR,
                          "engine: The archive interval in the"
                          " configuration file (%d) does not match the"
                          " station hardware interval (%d)." %
                          (software_interval,
                           self.engine.console.archive_interval))
        self.archive_interval = self.engine.console.archive_interval
        ival_msg = "(specified by hardware)"
    except NotImplementedError:
        # Drivers with no hardware interval raise NotImplementedError;
        # fall back to the configured software interval.
        self.archive_interval = software_interval
        ival_msg = "(specified in weewx configuration)"
    syslog.syslog(syslog.LOG_INFO, "engine: Using archive interval of %d seconds %s"
                  % (self.archive_interval, ival_msg))

    if self.archive_delay <= 0:
        raise weewx.ViolatedPrecondition("Archive delay (%.1f) must be greater than zero."
                                         % (self.archive_delay,))

    syslog.syslog(syslog.LOG_DEBUG, "engine: Use LOOP data in hi/low calculations: %d" %
                  (self.loop_hilo,))

    self.setup_database(config_dict)

    # Subscribe to the engine events this service handles.
    self.bind(weewx.STARTUP, self.startup)
    self.bind(weewx.PRE_LOOP, self.pre_loop)
    self.bind(weewx.POST_LOOP, self.post_loop)
    self.bind(weewx.CHECK_LOOP, self.check_loop)
    self.bind(weewx.NEW_LOOP_PACKET, self.new_loop_packet)
    self.bind(weewx.NEW_ARCHIVE_RECORD, self.new_archive_record)
def __init__(self, config_dict, altitude_vt, latitude_f, longitude_f,
             db_binder=None):
    """Initialize the service."""
    # Layer the user's configuration on top of the built-in defaults,
    # working on a copy so the defaults are not disturbed.
    defaults = ConfigObj(StringIO(DEFAULTS_INI))
    defaults.merge(config_dict)
    # Extract the part we're interested in.
    self.svc_dict = defaults['StdWXCalculate']

    if db_binder is None:
        db_binder = weewx.manager.DBBinder(config_dict)
    self.db_manager = db_binder.get_manager(
        data_binding=self.svc_dict.get('data_binding', 'wx_binding'),
        initialize=True)
    self.ignore_zero_wind = to_bool(
        self.svc_dict.get('ignore_zero_wind', True))

    # PressureCooker computes the various kinds of pressure.
    self.pressure_cooker = PressureCooker(
        altitude_vt,
        to_int(self.svc_dict.get('max_delta_12h', 1800)),
        self.svc_dict['Algorithms'].get('altimeter', 'aaASOS'))
    # RainRater computes rainRate.
    self.rain_rater = RainRater(
        to_int(self.svc_dict.get('rain_period', 900)),
        to_int(self.svc_dict.get('retain_period', 930)))
    # WXXTypes covers the simple scalar derivations (dewpoint, etc.).
    self.wx_types = WXXTypes(self.svc_dict, altitude_vt,
                             latitude_f, longitude_f)

    # Register all our type extensions with the XTypes system.
    weewx.xtypes.xtypes.append(self.pressure_cooker)
    weewx.xtypes.xtypes.append(self.rain_rater)
    weewx.xtypes.xtypes.append(self.wx_types)

    # Report which values will be calculated...
    log.info(
        "The following values will be calculated: %s",
        ', '.join("%s=%s" % (k, self.svc_dict['Calculations'][k])
                  for k in self.svc_dict['Calculations']))
    # ...and which algorithms will be used.
    log.info(
        "The following algorithms will be used for calculations: %s",
        ', '.join("%s=%s" % (k, self.svc_dict['Algorithms'][k])
                  for k in self.svc_dict['Algorithms']))
def run(self):
    """This is where the actual work gets done.

    Runs through the list of reports configured under [StdReport],
    skipping disabled ones and loading each report's skin.conf.

    NOTE(review): the visible portion of this method ends after the
    skin.conf parsing; the actual report generation presumably follows
    in code not shown here -- confirm against the full file.
    """
    if self.gen_ts:
        syslog.syslog(syslog.LOG_DEBUG,
                      "reportengine: Running reports for time %s"
                      % weeutil.weeutil.timestamp_to_string(self.gen_ts))
    else:
        # gen_ts of None/0 means "use the latest timestamp in the database"
        syslog.syslog(syslog.LOG_DEBUG, "reportengine: "
                      "Running reports for latest time in the database.")

    # Iterate over each requested report
    for report in self.config_dict['StdReport'].sections:

        # See if this report is disabled; reports are enabled by default.
        enabled = to_bool(self.config_dict['StdReport'][report].get('enable', True))
        if not enabled:
            syslog.syslog(syslog.LOG_DEBUG,
                          "reportengine: Skipping report %s" % report)
            continue

        syslog.syslog(syslog.LOG_DEBUG,
                      "reportengine: Running report %s" % report)

        # Figure out where the configuration file is for the skin used for
        # this report ('Standard' is the fallback skin name):
        skin_config_path = os.path.join(
            self.config_dict['WEEWX_ROOT'],
            self.config_dict['StdReport']['SKIN_ROOT'],
            self.config_dict['StdReport'][report].get('skin', 'Standard'),
            'skin.conf')

        # Retrieve the configuration dictionary for the skin. Wrap it in
        # a try block in case we fail. file_error=True makes a missing
        # file raise IOError rather than yielding an empty config.
        try:
            skin_dict = configobj.ConfigObj(skin_config_path, file_error=True)
            syslog.syslog(
                syslog.LOG_DEBUG,
                "reportengine: Found configuration file %s for report %s"
                % (skin_config_path, report))
        except IOError, e:
            # Missing/unreadable skin.conf: log and skip just this report.
            syslog.syslog(
                syslog.LOG_ERR, "reportengine: "
                "Cannot read skin configuration file %s for report %s: %s"
                % (skin_config_path, report, e))
            syslog.syslog(syslog.LOG_ERR, "        ****  Report ignored")
            continue
        except SyntaxError, e:
            # Malformed skin.conf: log and skip just this report.
            syslog.syslog(
                syslog.LOG_ERR, "reportengine: "
                "Failed to read skin configuration file %s for report %s: %s"
                % (skin_config_path, report, e))
            syslog.syslog(syslog.LOG_ERR, "        ****  Report ignored")
            continue
def run(self):
    """Copy static skin files into the HTML destination directory.

    Files listed under [CopyGenerator] copy_once are copied only on the
    first invocation; files under copy_always are copied on every run.
    Patterns may contain shell-style wildcards.
    """
    copy_dict = self.skin_dict['CopyGenerator']

    # determine how much logging is desired
    log_success = to_bool(copy_dict.get('log_success', True))

    # Build the list of option keys to consult. 'copy_once' applies only
    # to the first invocation of the generator and is processed first.
    option_keys = ['copy_always']
    if self.first_run:
        option_keys.insert(0, 'copy_once')

    # Accumulate the file patterns; a missing option simply contributes
    # nothing.
    copy_list = []
    for key in option_keys:
        try:
            copy_list += weeutil.weeutil.option_as_list(copy_dict[key])
        except KeyError:
            pass

    # Change directory to the skin subdirectory so the patterns resolve
    # relative to the skin:
    skin_dir = os.path.join(self.config_dict['WEEWX_ROOT'],
                            self.skin_dict['SKIN_ROOT'],
                            self.skin_dict['skin'])
    os.chdir(skin_dir)

    # Figure out the destination of the files
    html_dest_dir = os.path.join(self.config_dict['WEEWX_ROOT'],
                                 self.skin_dict['HTML_ROOT'])

    # The copy list can contain wildcard characters. Expand each pattern
    # and copy every match, preserving any relative subdirectory.
    ncopy = 0
    for spec in copy_list:
        for source_file in glob.glob(spec):
            # Final destination is the html destination joined with the
            # file's relative directory component:
            dest_dir = os.path.join(html_dest_dir,
                                    os.path.dirname(source_file))
            # Create the destination directory if needed; tolerate it
            # already existing.
            try:
                os.makedirs(dest_dir)
            except OSError:
                pass
            # shutil.copy does not preserve the modification time, so the
            # copy looks new and will be picked up by (e.g.) the FTP
            # generator.
            shutil.copy(source_file, dest_dir)
            ncopy += 1

    if log_success:
        syslog.syslog(syslog.LOG_INFO, "copygenerator: "
                      "copied %d files to %s" % (ncopy, html_dest_dir))
def setup(self):
    """Prepare the image generator: cache skin options, build unit
    helpers, and move into the skin directory."""
    skin_dict = self.skin_dict
    # Plot definitions live in the [ImageGenerator] section of skin.conf.
    self.image_dict = skin_dict['ImageGenerator']
    # Generic observation labels (used for plot titles); may be absent.
    self.title_dict = skin_dict.get('Labels', {}).get('Generic', {})
    # Unit formatting and conversion helpers built from skin options.
    self.formatter = weewx.units.Formatter.fromSkinDict(skin_dict)
    self.converter = weewx.units.Converter.fromSkinDict(skin_dict)
    # determine how much logging is desired
    self.log_success = to_bool(self.image_dict.get('log_success', True))
    # Work from the skin subdirectory so relative paths resolve
    # consistently across generators.
    skin_path = os.path.join(self.config_dict['WEEWX_ROOT'],
                             skin_dict['SKIN_ROOT'],
                             skin_dict['skin'])
    os.chdir(skin_path)
def __init__(self, config_dict, fix_config_dict):
    """Generic fix initialisation.

    config_dict: the full weewx configuration dictionary.
    fix_config_dict: options for this particular fix; must contain
        'name', and may contain 'dry_run' (defaults to True so that a
        fix never modifies data unless explicitly asked to).
    """
    # Keep both configuration dictionaries for later use by the fix.
    self.config_dict = config_dict
    self.fix_config_dict = fix_config_dict
    # The fix's display name; a KeyError here means a misconfigured fix.
    self.name = fix_config_dict['name']
    # Dry-run by default: report what would be done without doing it.
    dry_run_opt = fix_config_dict.get('dry_run', True)
    self.dry_run = to_bool(dry_run_opt)
def __init__(self, config_dict, name, is_proxy):
    """Initialise a source endpoint from its config section.

    Raises KeyError if `name` is not a section of config_dict.
    Proxied sources default to port 8000, direct ones to port 80.
    """
    self.is_proxy = is_proxy
    source_dict = config_dict[name]
    # Sources are disabled unless explicitly enabled.
    self.enable = to_bool(source_dict.get('enable', False))
    self.hostname = source_dict.get('hostname', '')
    # Choose the default port based on whether we go through a proxy.
    default_port = 8000 if is_proxy else 80
    self.port = to_int(source_dict.get('port', default_port))
    self.timeout = to_int(source_dict.get('timeout', 10))
def run(self): import weeutil.ftpupload # determine how much logging is desired log_success = to_bool(self.skin_dict.get('log_success', True)) t1 = time.time() if 'HTML_ROOT' in self.skin_dict: local_root = os.path.join(self.config_dict['WEEWX_ROOT'], self.skin_dict['HTML_ROOT']) else: local_root = os.path.join(self.config_dict['WEEWX_ROOT'], self.config_dict['StdReport']['HTML_ROOT']) try: ftp_data = weeutil.ftpupload.FtpUpload( server=self.skin_dict['server'], user=self.skin_dict['user'], password=self.skin_dict['password'], local_root=local_root, remote_root=self.skin_dict['path'], port=int(self.skin_dict.get('port', 21)), name=self.skin_dict['REPORT_NAME'], passive=to_bool(self.skin_dict.get('passive', True)), max_tries=int(self.skin_dict.get('max_tries', 3)), secure=to_bool(self.skin_dict.get('secure_ftp', False)), debug=int(self.skin_dict.get('debug', 0)), secure_data=to_bool(self.skin_dict.get('secure_data', True))) except Exception: syslog.syslog(syslog.LOG_DEBUG, "ftpgenerator: FTP upload not requested. Skipped.") return try: n = ftp_data.run() except (socket.timeout, socket.gaierror, ftplib.all_errors, IOError), e: (cl, unused_ob, unused_tr) = sys.exc_info() syslog.syslog(syslog.LOG_ERR, "ftpgenerator: " "Caught exception %s: %s" % (cl, e)) weeutil.weeutil.log_traceback(" **** ") return
def run(self): import user.sftpupload # determine how much logging is desired log_success = to_bool(self.skin_dict.get('log_success', True)) t1 = time.time() if self.skin_dict.has_key('HTML_ROOT'): local_root = os.path.join(self.config_dict['WEEWX_ROOT'], self.skin_dict['HTML_ROOT']) else: local_root = os.path.join(self.config_dict['WEEWX_ROOT'], self.config_dict['StdReport']['HTML_ROOT']) try: """Initialize an instance of FtpUpload. After initializing, call method run() to perform the upload.""" sftpData = SFTPUpload( #print(config_dict) #server: The remote server to which the files are to be uploaded. server = self.skin_dict['server'], #user, password : The user name and password that are to be used. user = self.skin_dict['user'], password = self.skin_dict['password'], #the local_root of the weewx public_html files. local_root = local_root, #the remote path we are looking to upload to. remote_root = self.skin_dict.get('path', 'public_html'), #name: A unique name to be given for this FTP session. This allows more #than one session to be uploading from the same local directory. [Optional. #Default is 'FTP'.] name = 'SFTP', #max_tries: How many times to try creating a directory or uploading #a file before giving up [Optional. Default is 3] max_tries = int(self.skin_dict.get('max_tries', 3)), #debug: Set to 1 for extra debug information, 0 otherwise. debug = int(self.config_dict.get('debug', 1)) ) #End SFTPUploader Initialisation. except Exception, e: syslog.syslog(syslog.LOG_DEBUG, "sftp-reportengine: SFTP upload not requested. Skipped.") print(e) return
def __init__(self, engine, config_dict):
    """Initialise the AdditionalObservations service.

    Reads the [AdditionalObservations] section of the configuration;
    does nothing further if the service is disabled.
    """
    super(AddObservations, self).__init__(engine, config_dict)

    svc_dict = config_dict.get('AdditionalObservations', {})

    # Enabled by default; bail out early when explicitly disabled.
    if not to_bool(svc_dict.get('enable', True)):
        loginf("Not enabled, exiting.")
        return

    # Apply any weewx-level configuration supplied in the sub-section.
    weewx_config = svc_dict.get('weewx')
    if weewx_config:
        self._config_weewx(weewx_config)
def __init__(self, queue, server_url, org, bucket, token, loop_bucket=None,
             line_format='single-line', measurement='record', tags=None,
             unit_system=None, augment_record=True, inputs=None,
             obs_to_upload='most', append_units_label=True,
             add_binding_tag=True, skip_upload=False, manager_dict=None,
             post_interval=None, max_backlog=MAX_SIZE, stale=None,
             log_success=True, log_failure=True, timeout=60, max_tries=3,
             retry_wait=5):
    """Initialise the Influx (v2 API) posting thread.

    queue: the queue from which records are read.
    server_url, org, bucket, token: InfluxDB 2.x connection parameters.
    loop_bucket: optional separate bucket for loop packets.
    inputs: optional per-observation options. FIX: the original default
        was the mutable `inputs=dict()`, a single dict object shared by
        every instance created without the argument; it is now None and
        a fresh dict is created per instance.
    Remaining keyword arguments are passed through to the REST thread
    superclass (posting cadence, backlog, retry and logging behavior).
    """
    super(InfluxThread, self).__init__(queue,
                                       protocol_name='Influx',
                                       manager_dict=manager_dict,
                                       post_interval=post_interval,
                                       max_backlog=max_backlog,
                                       stale=stale,
                                       log_success=log_success,
                                       log_failure=log_failure,
                                       max_tries=max_tries,
                                       timeout=timeout,
                                       retry_wait=retry_wait)
    # InfluxDB 2.x destination and credentials.
    self.org = org
    self.bucket = bucket
    self.token = token
    self.loop_bucket = loop_bucket
    self.server_url = server_url
    # What and how to upload.
    self.measurement = measurement
    self.tags = tags
    self.obs_to_upload = obs_to_upload
    self.append_units_label = append_units_label
    # Fresh dict per instance (see docstring FIX note).
    self.inputs = inputs if inputs is not None else dict()
    self.skip_upload = to_bool(skip_upload)
    self.unit_system = unit_system
    self.augment_record = augment_record
    # Per-observation templates are built lazily.
    self.templates = dict()
    self.line_format = line_format
    self.add_binding_tag = to_bool(add_binding_tag)
def configure_fields(self, fields_dict, ignore, publish_none_value,
                     append_unit_label, conversion_type, format_string):
    """ Configure the fields.

    Builds a per-field options dict from the configuration sections in
    fields_dict, falling back to the supplied defaults for any option a
    field does not set explicitly.
    """
    # pylint: disable=too-many-arguments
    fields = {}
    for field in fields_dict.sections:
        field_dict = fields_dict.get(field, {})
        # Each field inherits the caller-supplied defaults unless it
        # overrides them in its own section.
        fields[field] = {
            'name': field_dict.get('name', None),
            'unit': field_dict.get('unit', None),
            'ignore': to_bool(field_dict.get('ignore', ignore)),
            'publish_none_value': to_bool(
                field_dict.get('publish_none_value', publish_none_value)),
            'append_unit_label': to_bool(
                field_dict.get('append_unit_label', append_unit_label)),
            'conversion_type': field_dict.get(
                'conversion_type', conversion_type),
            'format_string': field_dict.get(
                'format_string', format_string),
        }

    logdbg(self.publish_type, fields)
    return fields
def __init__(self, **stn_dict):
    """Initialise the AcuRite driver from its configuration options."""
    loginf('driver version is %s' % DRIVER_VERSION)
    # Basic station identification and polling behavior.
    self.model = stn_dict.get('model', 'AcuRite')
    self.max_tries = int(stn_dict.get('max_tries', 10))
    self.retry_wait = int(stn_dict.get('retry_wait', 30))
    self.polling_interval = int(stn_dict.get('polling_interval', 6))
    # R2 decoding options: use the published sensor constants, and
    # optionally skip the sanity bounds on them.
    self.use_constants = to_bool(stn_dict.get('use_constants', False))
    if self.use_constants:
        loginf('R2 will be decoded using sensor constants')
    self.ignore_bounds = to_bool(stn_dict.get('ignore_bounds', False))
    if self.ignore_bounds:
        loginf('R2 bounds on constants will be ignored')
    # R3 reports are optional and may not be supported by all consoles.
    self.enable_r3 = int(stn_dict.get('enable_r3', 0))
    if self.enable_r3:
        loginf('R3 data will be attempted')
    # Runtime state: rain delta tracking and R3 failure accounting.
    self.last_rain = None
    self.last_r3 = None
    self.r3_fail_count = 0
    self.r3_max_fail = 3
    # Next-read deadlines for the R1 and R2 reports.
    self.r1_next_read = 0
    self.r2_next_read = 0
    # Module-level raw-debug flag, settable from the driver options.
    global DEBUG_RAW
    DEBUG_RAW = int(stn_dict.get('debug_raw', 0))
def __init__(self, queue, database, username=None, password=None,
             dbadmin_username=None, dbadmin_password=None,
             line_format='single-line', create_database=True,
             measurement='record', tags=None, unit_system=None,
             augment_record=True, inputs=None, obs_to_upload='most',
             append_units_label=True, server_url=_DEFAULT_SERVER_URL,
             skip_upload=False, manager_dict=None, post_interval=None,
             max_backlog=sys.maxint, stale=None, log_success=True,
             log_failure=True, timeout=60, max_tries=3, retry_wait=5):
    """Initialise the Influx (v1 API) posting thread.

    queue: the queue from which records are read.
    database: the InfluxDB database to write to; optionally created at
        startup when create_database is true, using the admin
        credentials if given, otherwise the regular credentials.
    inputs: optional per-observation options. FIX: the original default
        was the mutable `inputs=dict()`, a single dict object shared by
        every instance created without the argument; it is now None and
        a fresh dict is created per instance.
    Remaining keyword arguments are passed through to the REST thread
    superclass (posting cadence, backlog, retry and logging behavior).
    """
    super(InfluxThread, self).__init__(queue,
                                       protocol_name='Influx',
                                       manager_dict=manager_dict,
                                       post_interval=post_interval,
                                       max_backlog=max_backlog,
                                       stale=stale,
                                       log_success=log_success,
                                       log_failure=log_failure,
                                       max_tries=max_tries,
                                       timeout=timeout,
                                       retry_wait=retry_wait)
    # InfluxDB 1.x destination and credentials.
    self.database = database
    self.username = username
    self.password = password
    self.server_url = server_url
    # What and how to upload.
    self.measurement = measurement
    self.tags = tags
    self.obs_to_upload = obs_to_upload
    self.append_units_label = append_units_label
    # Fresh dict per instance (see docstring FIX note).
    self.inputs = inputs if inputs is not None else dict()
    self.skip_upload = to_bool(skip_upload)
    self.unit_system = unit_system
    self.augment_record = augment_record
    # Per-observation templates are built lazily.
    self.templates = dict()
    self.line_format = line_format
    if create_database:
        # Prefer the admin credentials for database creation; fall back
        # to the regular user; anonymous if neither is supplied.
        uname = None
        pword = None
        if dbadmin_username is not None:
            uname = dbadmin_username
            pword = dbadmin_password
        elif username is not None:
            uname = username
            pword = password
        self.create_database(uname, pword)
def __init__(self, q, password, server_url=Meteotemplate.DEFAULT_URL,
             skip_upload=False, manager_dict=None,
             post_interval=None, max_backlog=sys.maxsize, stale=None,
             log_success=True, log_failure=True,
             timeout=60, max_tries=3, retry_wait=5):
    """Initialise the Meteotemplate posting thread.

    q: the queue from which records are read.
    password: the Meteotemplate API password.
    server_url: endpoint of the Meteotemplate API script.
    Remaining keyword arguments configure the REST thread superclass
    (posting cadence, backlog, retry and logging behavior).
    """
    super(MeteotemplateThread, self).__init__(
        q, protocol_name='Meteotemplate', manager_dict=manager_dict,
        post_interval=post_interval, max_backlog=max_backlog, stale=stale,
        log_success=log_success, log_failure=log_failure,
        max_tries=max_tries, timeout=timeout, retry_wait=retry_wait)
    # Destination and credentials.
    self.server_url = server_url
    self.password = password
    self.skip_upload = to_bool(skip_upload)
    # FIXME: make field map changes available via config file
    self.field_map = self.create_default_field_map()
    loginf("Data will be uploaded to %s" % self.server_url)
def __init__(self, engine, config_dict):
    """Initialise the PublishQueue service.

    Reads [MQTTPublish][[PublishQueue]] from the configuration and, when
    enabled, starts the background publishing thread.
    """
    super(PublishQueue, self).__init__(engine, config_dict)
    self.publish_type = 'Queue'

    svc_dict = config_dict.get('MQTTPublish', {}).get('PublishQueue', {})

    # Enabled by default; bail out early when explicitly disabled.
    self.enable = to_bool(svc_dict.get('enable', True))
    if not self.enable:
        loginf(self.publish_type, "Not enabled, exiting.")
        return

    # Spin up the worker thread that drains the publish queue.
    self._thread = PublishQueueThread(config_dict)
    self._thread.start()
    logdbg(self.publish_type, "Threadid of PublishQueue is: %s" % gettid())
def __init__(
    self,
    queue,
    username,
    password,
    latitude,
    longitude,
    altitude,
    station_name,
    manager_dict,
    server_url=_SERVER_URL,
    skip_upload=False,
    post_interval=None,
    max_backlog=0,
    stale=None,
    log_success=True,
    log_failure=True,
    timeout=60,
    max_tries=3,
    retry_wait=5,
):
    """Initialise the OpenWeatherMap posting thread.

    queue: the queue from which records are read.
    username/password: OWM account credentials.
    latitude/longitude/altitude: station location (coerced to float).
    station_name: the OWM station identifier.
    Remaining keyword arguments configure the REST thread superclass.
    """
    super(OpenWeatherMapThread, self).__init__(
        queue,
        protocol_name="OWM",
        manager_dict=manager_dict,
        post_interval=post_interval,
        max_backlog=max_backlog,
        stale=stale,
        log_success=log_success,
        log_failure=log_failure,
        timeout=timeout,
        max_tries=max_tries,
        retry_wait=retry_wait,
    )
    # Account credentials.
    self.username = username
    self.password = password
    # Station location, normalised to floats.
    self.latitude = float(latitude)
    self.longitude = float(longitude)
    self.altitude = float(altitude)
    self.station_name = station_name
    # Upload destination and behavior.
    self.server_url = server_url
    self.skip_upload = to_bool(skip_upload)
def __init__(self, engine, config_dict):
    """Initialise the AS3935 lightning-sensor service.

    Reads the [AS3935] configuration section, optionally verifies the
    lightning database schema, configures the GPIO interrupt pin and the
    I2C sensor, and binds to NEW_ARCHIVE_RECORD events.
    """
    super(AS3935, self).__init__(engine, config_dict)
    loginf("service version is %s" % VERSION)

    svc_dict = config_dict.get("AS3935", {})
    # Sensor wiring/configuration options (I2C address and bus, GPIO pin).
    addr = int(svc_dict.get("address", 0x03))
    bus = int(svc_dict.get("bus", 1))
    indoors = to_bool(svc_dict.get("indoors", True))
    noise_floor = int(svc_dict.get("noise_floor", 0))
    calib = int(svc_dict.get("calibration", 0x6))
    pin = int(svc_dict.get("pin", 17))
    self.binding = svc_dict.get("data_binding", None)
    # Strikes accumulated between archive records.
    self.data = []

    # if a binding was specified, then use it to save strikes to database
    if self.binding is not None:
        # configure the lightning database
        dbm_dict = weewx.manager.get_manager_dict(
            config_dict["DataBindings"],
            config_dict["Databases"],
            self.binding,
            default_binding_dict=get_default_binding_dict(),
        )
        with weewx.manager.open_manager(dbm_dict, initialize=True) as dbm:
            # ensure schema on disk matches schema in memory
            dbcol = dbm.connection.columnsOf(dbm.table_name)
            memcol = [x[0] for x in dbm_dict["schema"]]
            if dbcol != memcol:
                raise Exception("as3935: schema mismatch: %s != %s"
                                % (dbcol, memcol))

    # configure the gpio and sensor (BCM pin numbering, input mode)
    GPIO.setmode(GPIO.BCM)
    GPIO.setup(pin, GPIO.IN)
    self.sensor = RPi_AS3935(address=addr, bus=bus)
    self.sensor.set_indoors(indoors)
    self.sensor.set_noise_floor(noise_floor)
    self.sensor.calibrate(tun_cap=calib)

    # add a gpio callback for the lightning strikes
    GPIO.add_event_detect(pin, GPIO.RISING, callback=self.handle_interrupt)

    # on each new archive record, read then clear data since last record
    self.bind(weewx.NEW_ARCHIVE_RECORD, self.read_data)
def __init__(self, queue, id, key, manager_dict,
             server_url=_SERVER_URL, skip_upload=False,
             post_interval=600, max_backlog=sys.maxint, stale=None,
             log_success=True, log_failure=True,
             timeout=60, max_tries=3, retry_wait=5):
    """Initialise the WeatherCloud posting thread.

    queue: the queue from which records are read.
    id/key: the WeatherCloud device id and key. (The parameter name
        `id` shadows the builtin but is part of the public interface.)
    Remaining keyword arguments configure the REST thread superclass.
    """
    super(WeatherCloudThread, self).__init__(
        queue, protocol_name='WeatherCloud', manager_dict=manager_dict,
        post_interval=post_interval, max_backlog=max_backlog, stale=stale,
        log_success=log_success, log_failure=log_failure,
        max_tries=max_tries, timeout=timeout, retry_wait=retry_wait)
    # Device credentials and destination.
    self.id = id
    self.key = key
    self.server_url = server_url
    self.skip_upload = to_bool(skip_upload)
def __init__(self, queue, username, password, manager_dict,
             server_url=_SERVER_URL, skip_upload=False,
             post_interval=None, max_backlog=sys.maxint, stale=None,
             log_success=True, log_failure=True,
             timeout=60, max_tries=3, retry_wait=5):
    """Initialise the wetter.com posting thread.

    queue: the queue from which records are read.
    username/password: wetter.com account credentials.
    Remaining keyword arguments configure the REST thread superclass.
    """
    super(WetterThread, self).__init__(
        queue, protocol_name='Wetter', manager_dict=manager_dict,
        post_interval=post_interval, max_backlog=max_backlog, stale=stale,
        log_success=log_success, log_failure=log_failure,
        max_tries=max_tries, timeout=timeout, retry_wait=retry_wait)
    # Account credentials and destination.
    self.username = username
    self.password = password
    self.server_url = server_url
    self.skip_upload = to_bool(skip_upload)
def __init__(self, **stn_dict):
    """Initialise the CC3000 driver: read options, open the serial port,
    and query the station for its configuration."""
    loginf('driver version is %s' % DRIVER_VERSION)
    # Module-level debug flags, settable from the driver options.
    global DEBUG_SERIAL
    DEBUG_SERIAL = int(stn_dict.get('debug_serial', 0))
    global DEBUG_CHECKSUM
    DEBUG_CHECKSUM = int(stn_dict.get('debug_checksum', 0))
    global DEBUG_OPENCLOSE
    DEBUG_OPENCLOSE = int(stn_dict.get('debug_openclose', 0))

    self.model = stn_dict.get('model', 'CC3000')
    port = stn_dict.get('port', CC3000.DEFAULT_PORT)
    loginf('using serial port %s' % port)
    self.polling_interval = float(stn_dict.get('polling_interval', 1))
    loginf('polling interval is %s seconds' % self.polling_interval)
    # Timestamps may come from the station clock or the computer clock.
    self.use_station_time = to_bool(stn_dict.get('use_station_time', True))
    loginf('using %s time for loop packets' %
           ('station' if self.use_station_time else 'computer'))
    self.max_tries = int(stn_dict.get('max_tries', 5))
    self.retry_wait = int(stn_dict.get('retry_wait', 60))
    self.sensor_map = stn_dict.get('sensor_map', self.DEFAULT_SENSOR_MAP)
    # Tracks the previous rain counter, for computing per-packet deltas.
    self.last_rain = None

    # Open the serial connection to the console.
    self.station = CC3000(port)
    self.station.open()

    # report the station configuration
    settings = self._init_station_with_retries(
        self.station, self.max_tries, self.retry_wait)
    loginf('firmware: %s' % settings['firmware'])
    self.arcint = settings['arcint']
    loginf('archive_interval: %s' % self.arcint)
    self.header = settings['header']
    loginf('header: %s' % self.header)
    # Map the station's unit setting onto the weewx unit systems.
    self.units = weewx.METRICWX if settings['units'] == 'METRIC' else weewx.US
    loginf('units: %s' % settings['units'])
    loginf('channel: %s' % settings['channel'])
    loginf('charger status: %s' % settings['charger'])
def genImages(self, gen_ts):
    """Generate the images.

    The time scales will be chosen to include the given timestamp, with
    nice beginning and ending times.

    gen_ts: The time around which plots are to be generated. This will
    also be used as the bottom label in the plots. [optional. Default is
    to use the time of the last record in the database.]

    NOTE(review): t1 and ngen are not referenced again in the visible
    portion of this method -- presumably a timing/success log follows;
    confirm against the full file.
    """
    t1 = time.time()
    ngen = 0
    # Loop over each time span class (day, week, month, etc.):
    for timespan in self.image_dict.sections :
        # Now, loop over all plot names in this time span class:
        for plotname in self.image_dict[timespan].sections :
            # Accumulate all options from parent nodes:
            plot_options = weeutil.weeutil.accumulateLeaves(
                self.image_dict[timespan][plotname])

            plotgen_ts = gen_ts
            if not plotgen_ts:
                # No time given: fall back to the last good timestamp in
                # the plot's database binding, then to "now".
                binding = plot_options['data_binding']
                archive = self.db_binder.get_manager(binding)
                plotgen_ts = archive.lastGoodStamp()
                if not plotgen_ts:
                    plotgen_ts = time.time()

            image_root = os.path.join(self.config_dict['WEEWX_ROOT'],
                                      plot_options['HTML_ROOT'])
            # Get the path that the image is going to be saved to:
            img_file = os.path.join(image_root, '%s.png' % plotname)

            # Check whether this plot needs to be done at all. An
            # up-to-date aggregate plot can be skipped entirely.
            ai = plot_options.as_int('aggregate_interval') if plot_options.has_key('aggregate_interval') else None
            if skipThisPlot(plotgen_ts, ai, img_file) :
                continue

            # Create the subdirectory that the image is to be put in.
            # Wrap in a try block in case it already exists.
            try:
                os.makedirs(os.path.dirname(img_file))
            except OSError:
                pass

            # Create a new instance of a time plot and start adding to it
            plot = weeplot.genplot.TimePlot(plot_options)

            # Calculate a suitable min, max time for the requested time
            # span and set it
            (minstamp, maxstamp, timeinc) = weeplot.utilities.scaletime(
                plotgen_ts - int(plot_options.get('time_length', 86400)),
                plotgen_ts)
            plot.setXScaling((minstamp, maxstamp, timeinc))

            # Set the y-scaling, using any user-supplied hints:
            plot.setYScaling(weeutil.weeutil.convertToFloat(
                plot_options.get('yscale', ['None', 'None', 'None'])))

            # Get a suitable bottom label:
            bottom_label_format = plot_options.get('bottom_label_format',
                                                   '%m/%d/%y %H:%M')
            bottom_label = time.strftime(bottom_label_format,
                                         time.localtime(plotgen_ts))
            plot.setBottomLabel(bottom_label)

            # Set day/night display (shaded background bands)
            plot.setLocation(self.stn_info.latitude_f,
                             self.stn_info.longitude_f)
            plot.setDayNight(
                to_bool(plot_options.get('show_daynight', False)),
                weeplot.utilities.tobgr(plot_options.get('daynight_day_color', '0xffffff')),
                weeplot.utilities.tobgr(plot_options.get('daynight_night_color', '0xf0f0f0')),
                weeplot.utilities.tobgr(plot_options.get('daynight_edge_color', '0xefefef')))

            # Loop over each line to be added to the plot.
            for line_name in self.image_dict[timespan][plotname].sections:
                # Accumulate options from parent nodes.
                line_options = weeutil.weeutil.accumulateLeaves(
                    self.image_dict[timespan][plotname][line_name])

                # See what SQL variable type to use for this line. By
                # default, use the section name.
                var_type = line_options.get('data_type', line_name)

                # Look for aggregation type:
                aggregate_type = line_options.get('aggregate_type')
                if aggregate_type in (None, '', 'None', 'none'):
                    # No aggregation specified.
                    aggregate_type = aggregate_interval = None
                else :
                    try:
                        # Aggregation specified. Get the interval.
                        aggregate_interval = line_options.as_int('aggregate_interval')
                    except KeyError:
                        # An aggregate type without an interval is a
                        # configuration error; skip just this line.
                        syslog.syslog(syslog.LOG_ERR,
                                      "imagegenerator: aggregate interval required for aggregate type %s" % aggregate_type)
                        syslog.syslog(syslog.LOG_ERR,
                                      "imagegenerator: line type %s skipped" % var_type)
                        continue

                # Now its time to find and hit the database:
                binding = line_options['data_binding']
                archive = self.db_binder.get_manager(binding)
                (start_vec_t, stop_vec_t, data_vec_t) = \
                    archive.getSqlVectors((minstamp, maxstamp),
                                          var_type,
                                          aggregate_type=aggregate_type,
                                          aggregate_interval=aggregate_interval)

                if weewx.debug:
                    assert(len(start_vec_t) == len(stop_vec_t))

                # Do any necessary unit conversions:
                new_start_vec_t = self.converter.convert(start_vec_t)
                new_stop_vec_t = self.converter.convert(stop_vec_t)
                new_data_vec_t = self.converter.convert(data_vec_t)

                # Add a unit label. NB: all will get overwritten except the
                # last. Get the label from the configuration dictionary.
                # TODO: Allow multiple unit labels, one for each plot line?
                unit_label = line_options.get(
                    'y_label', weewx.units.get_label_string(
                        self.formatter, self.converter, var_type))
                # Strip off any leading and trailing whitespace so it's
                # easy to center
                plot.setUnitLabel(unit_label.strip())

                # See if a line label has been explicitly requested:
                label = line_options.get('label')
                if not label:
                    # No explicit label. Is there a generic one?
                    # If not, then the SQL type will be used instead
                    label = self.title_dict.get(var_type, var_type)

                # See if a color has been explicitly requested.
                color = line_options.get('color')
                if color is not None:
                    color = weeplot.utilities.tobgr(color)

                # Get the line width, if explicitly requested.
                width = to_int(line_options.get('width'))

                # Get the type of plot ("bar', 'line', or 'vector')
                plot_type = line_options.get('plot_type', 'line')

                interval_vec = None

                # Some plot types require special treatments:
                if plot_type == 'vector':
                    # Vector plots may be rotated by a fixed angle.
                    vector_rotate_str = line_options.get('vector_rotate')
                    vector_rotate = -float(vector_rotate_str) if vector_rotate_str is not None else None
                else:
                    vector_rotate = None

                gap_fraction = None
                if plot_type == 'bar':
                    # Bar widths come from the per-point intervals.
                    interval_vec = [x[1] - x[0] for x in
                                    zip(new_start_vec_t.value,
                                        new_stop_vec_t.value)]
                elif plot_type == 'line':
                    gap_fraction = to_float(line_options.get('line_gap_fraction'))
                    if gap_fraction is not None:
                        # Only fractions strictly between 0 and 1 make sense.
                        if not 0 < gap_fraction < 1:
                            syslog.syslog(syslog.LOG_ERR,
                                          "imagegenerator: Gap fraction %5.3f outside range 0 to 1. Ignored." % gap_fraction)
                            gap_fraction = None

                # Get the type of line (only 'solid' or 'none' for now)
                line_type = line_options.get('line_type', 'solid')
                if line_type.strip().lower() in ['', 'none']:
                    line_type = None

                marker_type = line_options.get('marker_type')
                marker_size = to_int(line_options.get('marker_size', 8))

                # Add the line to the emerging plot:
                plot.addLine(weeplot.genplot.PlotLine(
                    new_stop_vec_t[0], new_data_vec_t[0],
                    label         = label,
                    color         = color,
                    width         = width,
                    plot_type     = plot_type,
                    line_type     = line_type,
                    marker_type   = marker_type,
                    marker_size   = marker_size,
                    bar_width     = interval_vec,
                    vector_rotate = vector_rotate,
                    gap_fraction  = gap_fraction))

            # OK, the plot is ready. Render it onto an image
            image = plot.render()

            try:
                # Now save the image
                image.save(img_file)
                ngen += 1
            except IOError, e:
                syslog.syslog(syslog.LOG_CRIT,
                              "imagegenerator: Unable to save to file '%s' %s:" % (img_file, e))
def __init__(self, **stn_dict):
    """Initialise the ObserverIP-style driver.

    Reads driver options, selects a field map based on the operating
    mode ('direct' talks to the station hardware; anything else uses
    the Weather Underground style field names), and optionally checks
    and sets the station calibration.

    FIX: when the units check failed in 'direct' mode, the original
    error log referenced names (i, calibdata, stcalib) that are not
    defined in this scope, so the failure path itself raised a
    NameError before the intended Exception. The log message has been
    replaced with one that uses no undefined names; the Exception that
    follows is unchanged.
    """
    loginf("version is %s" % DRIVER_VERSION)
    # Field maps translating station field names to weewx observation
    # names, keyed by firmware version (direct mode) or 'wu'. Each value
    # is a (source_field, converter) pair.
    self.directmap = {
        'wh2600USA_v2.2.0': {
            'dateTime': ('epoch', to_int),
            'inTemp': ('inTemp', to_float),
            'inHumidity': ('inHumi', to_float),
            'pressure': ('AbsPress', to_float),
            'outTemp': ('outTemp', to_float),
            'outHumidity': ('outHumi', to_float),
            'windDir': ('windir', to_float),
            'windSpeed': ('avgwind', to_float),
            'windGust': ('gustspeed', to_float),
            'radiation': ('solarrad', to_float),
            'UV': ('uvi', to_float),
            'rain': ('rainofyearly', to_float),
            'inTempBatteryStatus': ('inBattSta', self.norm),
            'outTempBatteryStatus': ('outBattSta1', self.norm),
        },
        'default': {
            'dateTime': ('epoch', to_int),
            'inTemp': ('inTemp', to_float),
            'inHumidity': ('inHumi', to_float),
            'pressure': ('AbsPress', to_float),
            'outTemp': ('outTemp', to_float),
            'outHumidity': ('outHumi', to_float),
            'windDir': ('windir', to_float),
            'windSpeed': ('avgwind', to_float),
            'windGust': ('gustspeed', to_float),
            'radiation': ('solarrad', to_float),
            'UV': ('uvi', to_float),
            'rain': ('rainofyearly', to_float),
        },
        'wu': {
            'dateTime': ('epoch', to_int),
            'outTemp': ('tempf', to_float),
            'outHumidity': ('humidity', to_float),
            'dewpoint': ('dewptf', to_float),
            'windchill': ('windchillf', to_float),
            'windDir': ('winddir', to_float),
            'windSpeed': ('windspeedmph', to_float),
            'windGust': ('windgustmph', to_float),
            'rain': ('yearlyrainin', to_float),
            'radiation': ('solarradiation', to_float),
            'UV': ('UV', to_float),
            'inTemp': ('indoortempf', to_float),
            'inHumidity': ('indoorhumidity', to_float),
            'pressure': ('baromin', to_float),
            'txBatteryStatus': ('lowbatt', to_float),
        },
    }
    # Basic driver options. 'xferfile' is mandatory.
    self.xferfile = stn_dict['xferfile']
    self.poll_interval = float(stn_dict.get('poll_interval', 10))
    self.dup_interval = float(stn_dict.get('dup_interval', 5))
    self.max_tries = int(stn_dict.get('max_tries', 5))
    self.retry_wait = int(stn_dict.get('retry_wait', 2))
    self.directtx = to_bool(stn_dict.get('direct', False))
    self.mode = stn_dict.get('mode', 'direct')
    self.check_calibration = to_bool(stn_dict.get('check_calibration', False))
    self.set_calibration = to_bool(stn_dict.get('set_calibration', False))
    # Runtime state for rain deltas and duplicate-packet suppression.
    self.lastrain = None
    self.lastpacket = 0

    if self.mode == 'direct':
        # Talk to the station hardware directly.
        self.obshardware = OpserverIPHardware(**stn_dict)
        if self.chkunits(self.expected_units):
            # FIX (see docstring): log without referencing undefined names.
            logerr("station units do not match the expected units")
            raise Exception("Station units not set correctly")
        # Pick the field map for this firmware, falling back to 'default'.
        if self.obshardware.version() in self.directmap:
            self.map = self.directmap[self.obshardware.version()]
        else:
            loginf("Unknown firmware version: %s"
                   % self.obshardware.version())
            self.map = self.directmap['default']
    else:
        # Non-direct mode uses Weather Underground style field names.
        self.map = self.directmap['wu']
        if self.check_calibration:
            # Hardware access is still needed to verify the units.
            self.obshardware = OpserverIPHardware(**stn_dict)
            if self.chkunits(self.expected_units):
                raise Exception("Station units not set correctly")

    if 'calibration' in stn_dict and self.check_calibration:
        if self.chkcalib(stn_dict['calibration']):
            if (self.set_calibration):
                # Try to write the expected calibration, then re-check.
                self.obshardware.setcalibration(stn_dict['calibration'])
                if self.chkcalib(stn_dict['calibration']):
                    raise Exception("Setting calibration unsuccessful")
            else:
                raise Exception("calibration error")
    loginf("polling interval is %s" % self.poll_interval)
def main(options, args, engine_class=StdEngine):
    """Prepare the main loop and run it.

    Mostly consists of a bunch of high-level preparatory calls, protected
    by try blocks in the case of an exception.

    options: parsed command-line options (log_label, daemon, pidfile,
        loop_on_init, exit).
    args: positional arguments; args[0] is the configuration file path.
    engine_class: the engine to instantiate; injectable for testing.

    NOTE(review): the visible portion ends inside the retry loop after
    the WeeWxIOError handler -- additional exception handlers presumably
    follow; confirm against the full file.
    """

    # Set the logging facility.
    syslog.openlog(options.log_label, syslog.LOG_PID | syslog.LOG_CONS)

    # Set up the signal handlers.
    signal.signal(signal.SIGHUP, sigHUPhandler)
    signal.signal(signal.SIGTERM, sigTERMhandler)

    syslog.syslog(syslog.LOG_INFO, "engine: Initializing weewx version %s"
                  % weewx.__version__)
    syslog.syslog(syslog.LOG_INFO, "engine: Using Python %s" % sys.version)
    syslog.syslog(syslog.LOG_INFO, "engine: Platform %s" % platform.platform())

    # Save the current working directory. A service might
    # change it. In case of a restart, we need to change it back.
    cwd = os.getcwd()

    if options.daemon:
        syslog.syslog(syslog.LOG_INFO, "engine: pid file is %s"
                      % options.pidfile)
        daemon.daemonize(pidfile=options.pidfile)

    # for backward compatibility, recognize loop_on_init from command-line
    loop_on_init = options.loop_on_init

    # be sure that the system has a reasonable time (at least 1 jan 2000).
    # log any problems every minute.
    n = 0
    while weewx.launchtime_ts < 946684800:
        if n % 120 == 0:
            syslog.syslog(
                syslog.LOG_INFO,
                "engine: waiting for sane time. current time is %s"
                % weeutil.weeutil.timestamp_to_string(weewx.launchtime_ts))
        n += 1
        time.sleep(0.5)
        weewx.launchtime_ts = time.time()

    # Outer retry loop: on recoverable errors the engine is torn down and
    # rebuilt from a freshly re-read configuration.
    while True:
        os.chdir(cwd)

        config_path = os.path.abspath(args[0])
        config_dict = getConfiguration(config_path)

        # Look for the debug flag. If set, ask for extra logging
        weewx.debug = int(config_dict.get('debug', 0))
        if weewx.debug:
            syslog.setlogmask(syslog.LOG_UPTO(syslog.LOG_DEBUG))
        else:
            syslog.setlogmask(syslog.LOG_UPTO(syslog.LOG_INFO))

        # See if there is a loop_on_init directive in the configuration, but
        # use it only if nothing was specified via command-line.
        if loop_on_init is None:
            loop_on_init = to_bool(config_dict.get('loop_on_init', False))

        try:
            syslog.syslog(syslog.LOG_DEBUG, "engine: Initializing engine")

            # Create and initialize the engine
            engine = engine_class(config_dict)

            syslog.syslog(syslog.LOG_INFO,
                          "engine: Starting up weewx version %s"
                          % weewx.__version__)

            # Start the engine. It should run forever unless an exception
            # occurs. Log it if the function returns.
            engine.run()
            syslog.syslog(syslog.LOG_CRIT,
                          "engine: Unexpected exit from main loop. Program exiting.")

        # Catch any console initialization error:
        except InitializationError, e:
            # Log it:
            syslog.syslog(syslog.LOG_CRIT,
                          "engine: Unable to load driver: %s" % e)
            # See if we should loop, waiting for the console to be ready.
            # Otherwise, just exit.
            if loop_on_init:
                syslog.syslog(syslog.LOG_CRIT,
                              "    ****  Waiting 60 seconds then retrying...")
                time.sleep(60)
                syslog.syslog(syslog.LOG_NOTICE, "engine: retrying...")
            else:
                syslog.syslog(syslog.LOG_CRIT, "    ****  Exiting...")
                sys.exit(weewx.IO_ERROR)

        # Catch any recoverable weewx I/O errors:
        except weewx.WeeWxIOError, e:
            # Caught an I/O error. Log it, wait 60 seconds, then try again
            syslog.syslog(syslog.LOG_CRIT,
                          "engine: Caught WeeWxIOError: %s" % e)
            if options.exit:
                syslog.syslog(syslog.LOG_CRIT, "    ****  Exiting...")
                sys.exit(weewx.IO_ERROR)
            syslog.syslog(syslog.LOG_CRIT,
                          "    ****  Waiting 60 seconds then retrying...")
            time.sleep(60)
            syslog.syslog(syslog.LOG_NOTICE, "engine: retrying...")
def __init__(self, config_dict, skin_dict, gen_ts, first_run, stn_info, record=None):
    """Initialize the stacked wind rose image generator.

    Reads the plot settings out of the skin's
    [ImageStackedWindRoseGenerator] section and caches them as instance
    attributes for later use when the images are generated.
    """

    # Initialise my superclass
    super(ImageStackedWindRoseGenerator, self).__init__(config_dict, skin_dict,
                                                       gen_ts, first_run,
                                                       stn_info, record)

    # Get a manager for our archive
    manager_binding = self.config_dict['StdArchive'].get('data_binding', 'wx_binding')
    self.archive = self.db_binder.get_manager(manager_binding)

    # Logging flags
    self.log_success = to_bool(skin_dict.get('log_success', True))
    self.log_failure = to_bool(skin_dict.get('log_failure', True))

    # Config sections and unit converter we will need later
    self.image_dict = self.skin_dict['ImageStackedWindRoseGenerator']
    self.title_dict = self.skin_dict['Labels']['Generic']
    self.converter = Converter.fromSkinDict(self.skin_dict)

    # Overall image attributes
    self.image_width = int(self.image_dict['image_width'])
    self.image_height = int(self.image_dict['image_height'])
    self.image_back_box_color = int(self.image_dict['image_background_box_color'], 0)
    self.image_back_circle_color = int(self.image_dict['image_background_circle_color'], 0)
    self.image_back_range_ring_color = int(self.image_dict['image_background_range_ring_color'], 0)
    self.image_back_image = self.image_dict['image_background_image']

    # Compass point abbreviations, in N, S, E, W order
    compass = option_as_list(self.skin_dict['Labels'].get('compass_points', 'N, S, E, W'))
    self.north = compass[0]
    self.south = compass[1]
    self.east = compass[2]
    self.west = compass[3]

    # Wind rose attributes
    self.plot_border = int(self.image_dict['windrose_plot_border'])
    self.legend_bar_width = int(self.image_dict['windrose_legend_bar_width'])
    self.font_path = self.image_dict['windrose_font_path']
    self.plot_font_size = int(self.image_dict['windrose_plot_font_size'])
    self.plot_font_color = int(self.image_dict['windrose_plot_font_color'], 0)
    self.legend_font_size = int(self.image_dict['windrose_legend_font_size'])
    self.legend_font_color = int(self.image_dict['windrose_legend_font_color'], 0)
    self.label_font_size = int(self.image_dict['windrose_label_font_size'])
    self.label_font_color = int(self.image_dict['windrose_label_font_color'], 0)

    # Petal colours; fall back to the defaults if fewer than 7 were given
    color_list = option_as_list(self.image_dict.get('windrose_plot_petal_colors',
                                                    DEFAULT_PETAL_COLORS))
    if len(color_list) < 7:
        color_list = DEFAULT_PETAL_COLORS
    self.petal_colors = []
    for raw_color in color_list:
        try:
            # Numeric colour specs are stored as ints...
            self.petal_colors.append(int(raw_color, 0))
        except ValueError:
            # ...anything that cannot be converted to a number is assumed
            # to be a colour word and is appended as is
            self.petal_colors.append(raw_color)

    # Get petal width, if not defined then set default to 16 (degrees)
    try:
        self.petal_width = int(self.image_dict['windrose_plot_petal_width'])
    except KeyError:
        self.petal_width = 16

    # Boundaries for speed range bands, these mark the colour boundaries
    # on the stacked bar in the legend. 7 elements only (ie 0, 10% of max,
    # 20% of max...100% of max)
    self.speedFactor = [0.0, 0.1, 0.2, 0.3, 0.5, 0.7, 1.0]

    # Placeholders that are populated later, during image generation
    for attr_name in ('period', 'p_gen_ts', 'label', 't_stamp', 't_stamp_loc',
                      'obName', 'dirName', 'units', 'max_ring_value',
                      'label_dir', 'draw', 'plotFont', 'legendFont',
                      'labelFont', 'roseMaxDiameter', 'originX', 'originY',
                      'image'):
        setattr(self, attr_name, None)
def genImages(self, gen_ts):
    """Generate the images.

    The time scales will be chosen to include the given timestamp, with
    nice beginning and ending times.

    gen_ts: The time around which plots are to be generated. This will
    also be used as the bottom label in the plots. [optional. Default is
    to use the time of the last record in the archive database.]
    """
    t1 = time.time()
    ngen = 0
    # Loop over each time span class (day, week, month, etc.):
    for timespan in self.image_dict.sections:
        # Now, loop over all plot names in this time span class:
        for plotname in self.image_dict[timespan].sections:
            # Hoist the repeated section lookup; it is used three times below.
            plot_section = self.image_dict[timespan][plotname]
            # Accumulate all options from parent nodes:
            plot_options = weeutil.weeutil.accumulateLeaves(plot_section)
            # Get the database archive
            archivedb = self._getArchive(plot_options['archive_database'])
            plotgen_ts = gen_ts
            if not plotgen_ts:
                plotgen_ts = archivedb.lastGoodStamp()
                if not plotgen_ts:
                    plotgen_ts = time.time()
            image_root = os.path.join(self.config_dict['WEEWX_ROOT'], plot_options['HTML_ROOT'])
            # Get the path of the file that the image is going to be saved to:
            img_file = os.path.join(image_root, '%s.png' % plotname)
            # Check whether this plot needs to be done at all.
            # NB: "in" replaces the Python-2-only has_key().
            ai = plot_options.as_int('aggregate_interval') if 'aggregate_interval' in plot_options else None
            if skipThisPlot(plotgen_ts, ai, img_file):
                continue
            # Create the subdirectory that the image is to be put in.
            # makedirs raises OSError if it already exists; ignore only that
            # (a bare except here would also swallow KeyboardInterrupt etc.).
            try:
                os.makedirs(os.path.dirname(img_file))
            except OSError:
                pass
            # Create a new instance of a time plot and start adding to it
            plot = weeplot.genplot.TimePlot(plot_options)
            # Calculate a suitable min, max time for the requested time span and set it
            (minstamp, maxstamp, timeinc) = weeplot.utilities.scaletime(
                plotgen_ts - int(plot_options.get('time_length', 86400)), plotgen_ts)
            plot.setXScaling((minstamp, maxstamp, timeinc))
            # Set the y-scaling, using any user-supplied hints:
            plot.setYScaling(weeutil.weeutil.convertToFloat(plot_options.get('yscale', ['None', 'None', 'None'])))
            # Get a suitable bottom label:
            bottom_label_format = plot_options.get('bottom_label_format', '%m/%d/%y %H:%M')
            bottom_label = time.strftime(bottom_label_format, time.localtime(plotgen_ts))
            plot.setBottomLabel(bottom_label)
            # Set day/night display
            plot.setLocation(self.stn_info.latitude_f, self.stn_info.longitude_f)
            plot.setDayNight(to_bool(plot_options.get('show_daynight', False)),
                             weeplot.utilities.tobgr(plot_options.get('daynight_day_color', '0xffffff')),
                             weeplot.utilities.tobgr(plot_options.get('daynight_night_color', '0xf0f0f0')),
                             weeplot.utilities.tobgr(plot_options.get('daynight_edge_color', '0xefefef')))
            # Loop over each line to be added to the plot.
            for line_name in plot_section.sections:
                # Accumulate options from parent nodes.
                line_options = weeutil.weeutil.accumulateLeaves(plot_section[line_name])
                # See what SQL variable type to use for this line. By default,
                # use the section name.
                var_type = line_options.get('data_type', line_name)
                # Add a unit label. NB: all will get overwritten except the last.
                # Get the label from the configuration dictionary.
                # TODO: Allow multiple unit labels, one for each plot line?
                unit_label = line_options.get('y_label', self.unit_helper.label.get(var_type, ''))
                # Strip off any leading and trailing whitespace so it's easy to center
                plot.setUnitLabel(unit_label.strip())
                # See if a line label has been explicitly requested:
                label = line_options.get('label')
                if not label:
                    # No explicit label. Is there a generic one?
                    # If not, then the SQL type will be used instead
                    label = self.title_dict.get(var_type, var_type)
                # See if a color has been explicitly requested.
                color = line_options.get('color')
                if color is not None:
                    color = weeplot.utilities.tobgr(color)
                # Get the line width, if explicitly requested.
                width = to_int(line_options.get('width'))
                # Get the type of plot ('bar', 'line', or 'vector')
                plot_type = line_options.get('plot_type', 'line')
                if plot_type == 'vector':
                    vector_rotate_str = line_options.get('vector_rotate')
                    vector_rotate = -float(vector_rotate_str) if vector_rotate_str is not None else None
                else:
                    vector_rotate = None
                # Get the type of line ('solid' or 'none' is all that's offered now)
                line_type = line_options.get('line_type', 'solid')
                if line_type.strip().lower() in ['', 'none']:
                    line_type = None
                marker_type = line_options.get('marker_type')
                marker_size = to_int(line_options.get('marker_size'))
                # Look for aggregation type:
                aggregate_type = line_options.get('aggregate_type')
                if aggregate_type in (None, '', 'None', 'none'):
                    # No aggregation specified.
                    aggregate_type = None
                    # Set the aggregate interval to the nominal archive interval:
                    aggregate_interval = self._getArchiveInterval(archivedb)
                else:
                    try:
                        # Aggregation specified. Get the interval.
                        aggregate_interval = line_options.as_int('aggregate_interval')
                    except KeyError:
                        syslog.syslog(syslog.LOG_ERR, "genimages: aggregate interval required for aggregate type %s" % aggregate_type)
                        syslog.syslog(syslog.LOG_ERR, "genimages: line type %s skipped" % var_type)
                        continue
                # Get the fraction that defines gap size
                if plot_type == 'bar':
                    gap_fraction = line_options.get('bar_gap_fraction')
                elif plot_type == 'line':
                    gap_fraction = line_options.get('line_gap_fraction')
                else:
                    gap_fraction = None
                if gap_fraction is not None:
                    gap_fraction = float(gap_fraction)
                    if not 0 < gap_fraction < 1:
                        syslog.syslog(syslog.LOG_ERR, "genimages: gap fraction must be greater than zero and less than one. Ignored.")
                        gap_fraction = None
                # Get the time and data vectors from the database:
                (time_vec_t, data_vec_t) = archivedb.getSqlVectorsExtended(var_type, minstamp, maxstamp,
                                                                          aggregate_interval, aggregate_type)
                new_time_vec_t = self.converter.convert(time_vec_t)
                new_data_vec_t = self.converter.convert(data_vec_t)
                # Add the line to the emerging plot:
                plot.addLine(weeplot.genplot.PlotLine(new_time_vec_t[0], new_data_vec_t[0],
                                                      label = label,
                                                      color = color,
                                                      width = width,
                                                      plot_type = plot_type,
                                                      line_type = line_type,
                                                      marker_type = marker_type,
                                                      marker_size = marker_size,
                                                      bar_width = aggregate_interval,
                                                      vector_rotate = vector_rotate,
                                                      gap_fraction = gap_fraction))
            # OK, the plot is ready. Render it onto an image
            image = plot.render()
            # Now save the image
            image.save(img_file)
            ngen += 1
    t2 = time.time()
    syslog.syslog(syslog.LOG_INFO, "genimages: Generated %d images for %s in %.2f seconds" %
                  (ngen, self.skin_dict['REPORT_NAME'], t2 - t1))