def __init__(self, config, gui_job):
    """Set up job bookkeeping and connect to brickd.

    config  -- configuration dict; reads config['hosts']['default'] for
               host name and port
    gui_job -- GUI job object (presumably None for console starts —
               TODO confirm against callers)

    Raises DataLoggerException(DL_CRITICAL_ERROR) and leaves self.ipcon
    as None when the connection to brickd fails.
    """
    super(DataLogger, self).__init__()

    self.jobs = []  # thread hashmap for all running threads/jobs
    self.job_exit_flag = False  # flag for stopping the thread
    self.job_sleep = 1  # TODO: Enhancement -> use condition objects
    self.timers = []
    self._gui_job = gui_job
    self.data_queue = {}  # universal data_queue hash map
    self.host = config['hosts']['default']['name']
    self.port = config['hosts']['default']['port']
    self.loggable_devices = []
    self.ipcon = IPConnection()

    # register callbacks before connecting so the connected/enumerate
    # events fired during connect() are not missed
    self.ipcon.register_callback(IPConnection.CALLBACK_CONNECTED, self.cb_connected)
    self.ipcon.register_callback(IPConnection.CALLBACK_ENUMERATE, self.cb_enumerate)

    try:
        self.ipcon.connect(self.host, self.port)  # Connect to brickd
    except Exception as e:
        EventLogger.critical("A critical error occur: " + str(e))
        self.ipcon = None  # signals to callers that the logger did not start
        raise DataLoggerException(DataLoggerException.DL_CRITICAL_ERROR, "A critical error occur: " + str(e))

    EventLogger.info("Connection to " + self.host + ":" + str(self.port) + " established.")
    self.ipcon.set_timeout(1)  # TODO: Timeout number
    EventLogger.debug("Set ipcon.time_out to 1.")
    self._config = config
    # default CSV file name is timestamped so repeated runs do not clobber each other
    self.csv_file_name = 'logger_data_{0}.csv'.format(int(time.time()))
    self.csv_enabled = True
    self.stopped = False
def validate(self):
    """Run every section validator over the JSON configuration.

    Returns True when the configuration is error-free, False otherwise.
    """
    EventLogger.info("Validating config file")

    # each validator increments self._error_count for every problem found
    self._validate_hosts()
    self._validate_data()
    self._validate_debug()
    self._validate_devices()

    errors = self._error_count
    if errors > 0:
        EventLogger.critical("Validation found {0} errors".format(errors))
    else:
        EventLogger.info("Validation successful")

    return errors == 0
def _job(self):
    """Drain this job's data queue into the CSV file until told to stop.

    Loops forever: writes any queued row, sleeps while idle, and once the
    exit flag is set and the queue is empty, closes the writer, removes
    the queue and exits. Any exception is logged and stops the job.
    """
    try:
        # check for datalogger object
        if AbstractJob._job(self):
            return

        EventLogger.debug(self._job_name + " Started")
        csv_writer = CSVWriter(self._datalogger.csv_file_name)

        while True:
            if not self._datalogger.data_queue[self.name].empty():
                csv_data = self._get_data_from_queue()
                if not csv_writer.write_data_row(csv_data):
                    EventLogger.warning(self._job_name + " Could not write csv row!")

            # nothing to do yet: sleep instead of busy-waiting
            if not self._exit_flag and self._datalogger.data_queue[self.name].empty():
                time.sleep(self._datalogger.job_sleep)

            # shutdown requested and queue fully drained: close and finish
            if self._exit_flag and self._datalogger.data_queue[self.name].empty():
                exit_return_value = csv_writer.close_file()
                if exit_return_value:
                    EventLogger.debug(self._job_name + " Closed his csv_writer")
                else:
                    # BUG FIX: previously logged str(exit) (the exit builtin)
                    # instead of the actual close_file() return value
                    EventLogger.debug(self._job_name + " Could NOT close his csv_writer! EXIT_RETURN_VALUE=" + str(exit_return_value))
                EventLogger.debug(self._job_name + " Finished")
                self._remove_from_data_queue()
                break
    except Exception as e:
        EventLogger.critical(self._job_name + " " + str(e))
        self.stop()
def __init__(self, config, gui_job=None):
    """
    config -- brickv.data_logger.configuration_validator.Configuration

    Connects to brickd using the host/port from the configuration's
    general section. Raises DataLoggerException(DL_CRITICAL_ERROR) and
    leaves self.ipcon as None when the connection fails.
    """
    super(DataLogger, self).__init__()

    self.jobs = []  # thread hashmap for all running threads/jobs
    self.job_exit_flag = False  # flag for stopping the thread
    self.job_sleep = 1  # TODO: Enhancement -> use condition objects
    self.timers = []
    self._gui_job = gui_job
    self.max_file_size = None
    self.max_file_count = None
    self.data_queue = {}  # universal data_queue hash map
    self.host = config._general[ConfigurationReader.GENERAL_HOST]
    # port is stored as a string in the config; parse it to an int here
    self.port = utils.Utilities.parse_to_int(config._general[ConfigurationReader.GENERAL_PORT])
    self.ipcon = IPConnection()

    try:
        self.ipcon.connect(self.host, self.port)  # Connect to brickd
    except Exception as e:
        EventLogger.critical("A critical error occur: " + str(e))
        self.ipcon = None  # signals to callers that the logger did not start
        raise DataLoggerException(DataLoggerException.DL_CRITICAL_ERROR, "A critical error occur: " + str(e))

    EventLogger.info("Connection to " + self.host + ":" + str(self.port) + " established.")
    self.ipcon.set_timeout(1)  # TODO: Timeout number
    EventLogger.debug("Set ipcon.time_out to 1.")
    self._configuration = config
    self.default_file_path = "logged_data.csv"
    self.log_to_file = True
    self.log_to_xively = False
    self.stopped = False
def validate_devices_section(self):
    """Validate the devices section of the configuration file.

    For every configured device: checks the device name against the known
    device definitions, validates the UID, and validates each value's
    interval and subvalue flags. Errors are reported via
    EventLogger.critical. Also feeds the log-space counter with an
    estimate of logged lines per second.
    """
    device_definitions = Idf.DEVICE_DEFINITIONS
    for device in self.json_config._devices:
        # name
        # BUG FIX: use .get() — plain indexing raised KeyError for an
        # unknown device name, so the intended "no such device" error
        # path below was unreachable
        blueprint = device_definitions.get(device[Idf.DD_NAME])
        if blueprint is None:
            EventLogger.critical(
                self._generate_device_error_message(uid=device[Idf.DD_UID],
                                                    tier_array=["general"],
                                                    msg="no such device available"))
            continue  # next device

        # uid: must be a string of at least 3 chars and not the placeholder default
        if not Utilities.is_valid_string(device[Idf.DD_UID], 3) or device[Idf.DD_UID] == Idf.DD_UID_DEFAULT:
            EventLogger.critical(
                self._generate_device_error_message(uid=device[Idf.DD_UID],
                                                    tier_array=["general"],
                                                    msg="the UID from '" + device[Idf.DD_NAME] + "' is invalid"))

        device_values = device[Idf.DD_VALUES]
        blueprint_values = blueprint[Idf.DD_VALUES]
        # values
        for device_value in device_values:
            logged_values = 0
            if device_value not in blueprint_values:
                EventLogger.critical(
                    self._generate_device_error_message(uid=device[Idf.DD_UID],
                                                        tier_array=["values"],
                                                        msg="invalid value " + str(device_value)))
            else:
                # interval
                interval = device_values[device_value][Idf.DD_VALUES_INTERVAL]
                if not self._is_valid_interval(interval):
                    EventLogger.critical(
                        self._generate_device_error_message(uid=device[Idf.DD_UID],
                                                            tier_array=["values"],
                                                            msg="invalid interval " + str(interval)))
                # subvalue
                try:
                    subvalues = device_values[device_value][Idf.DD_SUBVALUES]
                    for value in subvalues:
                        if not type(subvalues[value]) == bool:  # type check for validation
                            EventLogger.critical(
                                self._generate_device_error_message(uid=device[Idf.DD_UID],
                                                                    tier_array=["values"],
                                                                    msg="invalid type " + str(value)))
                        else:
                            if subvalues[value]:  # value check for "lines per second" calculation
                                logged_values += 1
                except KeyError:
                    # value has no subvalues; it counts as a single logged value
                    if interval > 0:  # just one value to log
                        logged_values += 1

                if interval > 0:
                    self._log_space_counter.add_lines_per_second(interval / 1000 * logged_values)
def _job(self):
    """Forward queued measurements to the GUI until told to stop.

    Emits each queued row via signalNewData, sleeps while idle, and once
    the exit flag is set and the queue is empty, removes this job's queue
    and exits. Any exception is logged and stops the job.
    """
    try:
        # check for datalogger object
        if AbstractJob._job(self):
            return

        EventLogger.debug(self._job_name + " Started")

        while True:
            if not self._datalogger.data_queue[self.name].empty():
                csv_data = self._get_data_from_queue()
                # hand the row to the GUI thread via a Qt signal
                self.signalNewData.emit(csv_data)

            # nothing to do yet: sleep instead of busy-waiting
            if not self._exit_flag and self._datalogger.data_queue[self.name].empty():
                time.sleep(self._datalogger.job_sleep)

            # shutdown requested and queue fully drained: finish
            if self._exit_flag and self._datalogger.data_queue[self.name].empty():
                self._remove_from_data_queue()
                break
    except Exception as e:
        EventLogger.critical(self._job_name + " -.- " + str(e))
        self.stop()
def __init__(self, path_to_config=None, configuration=None):
    """Build a Configuration from a JSON file or an existing configuration.

    path_to_config -- path to the json configuration file OR
    configuration  -- the configuration itself (Configuration instance or
                      a plain dict, which is mapped onto a Configuration)

    After loading, the configuration is run through ConfigurationValidator.
    """
    self._configuration = Configuration()
    self._readConfigErr = 0  # errors that occurred while reading the file

    no_path = path_to_config is None
    no_config = configuration is None

    if no_path and no_config:
        EventLogger.critical("ConfigurationReader needs a path to the configuration file or an actual configuration")
        return

    if not no_path:
        self.fileName = path_to_config
        self._read_json_config_file()

    if not no_config:
        if isinstance(configuration, Configuration):
            self._configuration = configuration
        else:
            self.map_dict_to_config(configuration)

    config_validator = ConfigurationValidator(self._configuration)
    # read errors count toward the validation total
    config_validator._error_count += self._readConfigErr
    config_validator.validate()
def __init__(self, path_to_config=None, configuration=None):
    """Initialize from a JSON config file path or a ready configuration.

    path_to_config -- path to the json configuration file OR
    configuration  -- the configuration itself

    Whichever source is provided is loaded into self._configuration and
    then validated with ConfigurationValidator.
    """
    self._configuration = Configuration()
    self._readConfigErr = 0  # errors encountered while reading the file

    if path_to_config is None and configuration is None:
        EventLogger.critical("ConfigurationReader needs a path to the configuration file or an actual configuration")
        return

    if path_to_config is not None:
        self.fileName = path_to_config
        self._read_json_config_file()

    if configuration is not None:
        # accept either a Configuration instance or a raw dict
        is_config_obj = isinstance(configuration, Configuration)
        if is_config_obj:
            self._configuration = configuration
        else:
            self.map_dict_to_config(configuration)

    checker = ConfigurationValidator(self._configuration)
    checker._error_count += self._readConfigErr  # include read errors
    checker.validate()
def _job(self):
    """Drain this job's data queue into the CSV file until told to stop.

    Writes any queued row, sleeps while idle, and once the exit flag is
    set and the queue is empty, closes the writer, removes the queue and
    exits. Any exception is logged and stops the job.
    """
    try:
        # check for datalogger object
        if AbstractJob._job(self):
            return

        EventLogger.debug(self._job_name + " Started")
        csv_writer = CSVWriter(self._datalogger.csv_file_name)

        while True:
            if not self._datalogger.data_queue[self.name].empty():
                csv_data = self._get_data_from_queue()
                if not csv_writer.write_data_row(csv_data):
                    EventLogger.warning(self._job_name + " Could not write csv row!")

            # nothing to do yet: sleep instead of busy-waiting
            if not self._exit_flag and self._datalogger.data_queue[self.name].empty():
                time.sleep(self._datalogger.job_sleep)

            # shutdown requested and queue fully drained: close and finish
            if self._exit_flag and self._datalogger.data_queue[self.name].empty():
                exit_return_value = csv_writer.close_file()
                if exit_return_value:
                    EventLogger.debug(self._job_name + " Closed his csv_writer")
                else:
                    # BUG FIX: previously logged str(exit) (the exit builtin)
                    # instead of the actual close_file() return value
                    EventLogger.debug(self._job_name + " Could NOT close his csv_writer! EXIT_RETURN_VALUE=" + str(exit_return_value))
                EventLogger.debug(self._job_name + " Finished")
                self._remove_from_data_queue()
                break
    except Exception as e:
        EventLogger.critical(self._job_name + " " + str(e))
        self.stop()
def validate(self):
    """
    This function performs the validation of the various
    sections of the JSON configuration file.

    Returns True when no errors were found, False otherwise.
    """
    EventLogger.info("Validating config file")

    # each validator increments self._error_count for every problem found
    self._validate_hosts()
    self._validate_data()
    self._validate_debug()
    self._validate_devices()

    if self._error_count > 0:
        EventLogger.critical("Validation found {0} errors".format(self._error_count))
    else:
        EventLogger.info("Validation successful")

    # NOTE(review): disabled log-space estimate, kept for reference
    #logging_time = self._log_space_counter.calculate_time()
    #if self._log_space_counter.file_size != 0:
    #    EventLogger.info("Logging time until old data will be overwritten.")
    #    EventLogger.info("Days: " + str(logging_time[0]) +
    #                     " Hours: " + str(logging_time[1]) +
    #                     " Minutes: " + str(logging_time[2]) +
    #                     " Seconds: " + str(logging_time[3]))
    #EventLogger.info("Will write about " + str(
    #    int(self._log_space_counter.lines_per_second + 0.5)) + " lines per second into the log-file.")

    return self._error_count == 0
def update_setup_tab(self, general_section):
    """
    Update the information of the setup tab with the given general_section.

    Copies host, port, file count/size, file paths, log level and the
    two event-log checkboxes from the config dict into the widgets.
    Any failure is logged as critical and the update is aborted.
    """
    from brickv.data_logger.configuration_validator import ConfigurationReader

    try:
        # host combo_host setEditText(String)
        self.combo_host.setEditText(general_section[ConfigurationReader.GENERAL_HOST])
        # port spinbox_port setValue(int)
        self.spinbox_port.setValue(general_section[ConfigurationReader.GENERAL_PORT])
        # file_count spin_file_count setValue(int)
        self.spin_file_count.setValue(general_section[ConfigurationReader.GENERAL_LOG_COUNT])
        # file_size spin_file_size setValue(int/1024/1024) (Byte -> MB)
        self.spin_file_size.setValue((general_section[ConfigurationReader.GENERAL_LOG_FILE_SIZE] / 1024.0 / 1024.0))
        # path_to_file line_path_to_file setText(string)
        self.line_path_to_file.setText(general_section[ConfigurationReader.GENERAL_PATH_TO_FILE])

        # logfile path
        self.line_path_to_eventfile.setText(general_section[ConfigurationReader.GENERAL_EVENTLOG_PATH])

        # loglevel: the combo box is assumed to be filled in the sorted
        # order of GUILogger._convert_level, so the index is found by
        # walking the sorted keys
        ll = general_section[ConfigurationReader.GENERAL_EVENTLOG_LEVEL]
        od = collections.OrderedDict(sorted(GUILogger._convert_level.items()))
        counter = 0
        # TODO better way to set the combo box index?
        for k in od.keys():
            if ll == k:
                break
            counter += 1
        self.combo_loglevel.setCurrentIndex(counter)

        # log_to_console
        # maps a bool onto Qt check states (2 = Qt.Checked, 0 = Qt.Unchecked)
        def __checkbox_bool_setter(bool_value):
            if bool_value:
                return 2
            else:
                return 0

        self.checkbox_to_file.setChecked(
            __checkbox_bool_setter(general_section[ConfigurationReader.GENERAL_EVENTLOG_TO_FILE]))
        # log_to_file
        self.checkbox_to_console.setCheckState(
            __checkbox_bool_setter(general_section[ConfigurationReader.GENERAL_EVENTLOG_TO_CONSOLE]))
    except Exception as e:
        EventLogger.critical("Could not read the General Section of the Config-File! -> " + str(e))
        return
def main(config_filename, gui_config, gui_job, override_csv_file_name, override_log_file_name, interrupted_ref):
    """
    This function initialize the data logger and starts the logging process.

    config_filename         -- path to a JSON config (console start), or None
    gui_config              -- config dict when started from the GUI
    gui_job                 -- GUI job object (GUI start only)
    override_csv_file_name  -- optional CSV file name overriding the config
    override_log_file_name  -- optional event-log file name overriding the config
    interrupted_ref         -- one-element list; [0] becomes truthy to stop

    Returns the DataLogger on success, None on GUI-start failure; console
    starts terminate the process via sys.exit instead of returning.
    """
    config = None
    gui_start = False

    if config_filename != None:  # started via console
        config = load_and_validate_config(config_filename)
        if config == None:
            return None
    else:  # started via GUI
        config = gui_config
        gui_start = True

    # command-line overrides take precedence over the config file
    if override_csv_file_name != None:
        config['data']['csv']['file_name'] = override_csv_file_name

    if override_log_file_name != None:
        config['debug']['log']['file_name'] = override_log_file_name

    try:
        if config['debug']['log']['enabled']:
            EventLogger.add_logger(FileLogger('FileLogger', log_level_name_to_id(config['debug']['log']['level']), config['debug']['log']['file_name']))

        data_logger = DataLogger(config, gui_job)

        if data_logger.ipcon is not None:
            data_logger.run()
            if not gui_start:
                # console mode: block here until interrupted, then shut down
                while not interrupted_ref[0]:
                    try:
                        time.sleep(0.25)
                    except:
                        pass  # interrupt handling is done via interrupted_ref
                data_logger.stop()
                sys.exit(0)
        else:
            raise DataLoggerException(DataLoggerException.DL_CRITICAL_ERROR, "DataLogger did not start logging process! Please check for errors.")
    except Exception as exc:
        EventLogger.critical(str(exc))
        if gui_start:
            return None
        else:
            sys.exit(DataLoggerException.DL_CRITICAL_ERROR)

    return data_logger
def main(config_filename, gui_config, gui_job, override_csv_file_name, override_log_file_name, interrupted_ref):
    """
    This function initialize the data logger and starts the logging process.

    config_filename         -- path to a JSON config (console start), or None
    gui_config              -- config dict when started from the GUI
    gui_job                 -- GUI job object (GUI start only)
    override_csv_file_name  -- optional CSV file name overriding the config
    override_log_file_name  -- optional event-log file name overriding the config
    interrupted_ref         -- one-element list; [0] becomes truthy to stop

    Returns the DataLogger on success, None on GUI-start failure; console
    starts terminate the process via sys.exit instead of returning.
    """
    config = None
    gui_start = False

    if config_filename != None:  # started via console
        config = load_and_validate_config(config_filename)
        if config == None:
            return None
    else:  # started via GUI
        config = gui_config
        gui_start = True

    # command-line overrides take precedence over the config file
    if override_csv_file_name != None:
        config['data']['csv']['file_name'] = override_csv_file_name

    if override_log_file_name != None:
        config['debug']['log']['file_name'] = override_log_file_name

    # NOTE(review): unlike the try-wrapped variant, a FileLogger failure
    # here propagates to the caller uncaught
    if config['debug']['log']['enabled']:
        EventLogger.add_logger(FileLogger('FileLogger', log_level_name_to_id(config['debug']['log']['level']), config['debug']['log']['file_name']))

    try:
        data_logger = DataLogger(config, gui_job)

        if data_logger.ipcon is not None:
            data_logger.run()
            if not gui_start:
                # console mode: block here until interrupted, then shut down
                while not interrupted_ref[0]:
                    try:
                        time.sleep(0.25)
                    except:
                        pass  # interrupt handling is done via interrupted_ref
                data_logger.stop()
                sys.exit(0)
        else:
            raise DataLoggerException(DataLoggerException.DL_CRITICAL_ERROR, "DataLogger did not start logging process! Please check for errors.")
    except Exception as exc:
        EventLogger.critical(str(exc))
        if gui_start:
            return None
        else:
            sys.exit(DataLoggerException.DL_CRITICAL_ERROR)

    return data_logger
def save_config(config, filename):
    """Serialize *config* as pretty-printed JSON and write it to *filename*.

    Returns True on success, False when serialization or writing fails
    (the error is logged as critical).
    """
    EventLogger.info('Saving config to file: {0}'.format(filename))

    try:
        # write UTF-8 bytes explicitly, so the file encoding does not
        # depend on the platform default
        encoded = json.dumps(config, ensure_ascii=False, sort_keys=True, indent=2).encode('utf-8')
        with open(filename, 'wb') as outfile:
            outfile.write(encoded)
    except Exception as e:
        EventLogger.critical('Could not write config file as JSON: {0}'.format(e))
        return False

    EventLogger.info('Config successfully saved to: {0}'.format(filename))
    return True
def save_config(config, filename):
    """Write *config* to *filename* as sorted, indented, UTF-8 JSON.

    Returns True on success; on any failure logs a critical message and
    returns False.
    """
    EventLogger.info('Saving config to file: {0}'.format(filename))

    try:
        payload = json.dumps(config, ensure_ascii=False, sort_keys=True, indent=2)
        with open(filename, 'wb') as fp:
            fp.write(payload.encode('utf-8'))
    except Exception as e:
        EventLogger.critical('Could not write config file as JSON: {0}'.format(e))
        return False

    EventLogger.info('Config successfully saved to: {0}'.format(filename))
    return True
def __init__(self, config, gui_job):
    """Set up job bookkeeping, validate the authentication secret and
    connect to brickd.

    config  -- configuration dict; reads config['hosts']['default'] for
               host name, port and optional authentication secret
    gui_job -- GUI job object (presumably None for console starts —
               TODO confirm against callers)

    Raises DataLoggerException(DL_CRITICAL_ERROR) and leaves self.ipcon
    as None when the connection to brickd fails.
    """
    super(DataLogger, self).__init__()

    self.daemon = True  # don't keep the process alive for this thread

    self.jobs = []  # thread hashmap for all running threads/jobs
    self.job_exit_flag = False  # flag for stopping the thread
    self.job_sleep = 1  # TODO: Enhancement -> use condition objects
    self.timers = []
    self._gui_job = gui_job
    self.data_queue = {}  # universal data_queue hash map
    self.host = config['hosts']['default']['name']
    self.port = config['hosts']['default']['port']
    self.secret = config['hosts']['default']['secret']

    if self.secret != None:
        # the protocol requires an ASCII secret; a non-ASCII one is
        # rejected up front and authentication is disabled
        try:
            self.secret = self.secret.encode('ascii')
        except:
            EventLogger.critical('Authentication secret cannot contain non-ASCII characters')
            self.secret = None

    self.loggable_devices = []
    self.ipcon = IPConnection()

    # register callbacks before connecting so the connected/enumerate
    # events fired during connect() are not missed
    self.ipcon.register_callback(IPConnection.CALLBACK_CONNECTED, self.cb_connected)
    self.ipcon.register_callback(IPConnection.CALLBACK_ENUMERATE, self.cb_enumerate)

    try:
        self.ipcon.connect(self.host, self.port)  # Connect to brickd
    except Exception as e:
        EventLogger.critical("A critical error occur: " + str(e))
        self.ipcon = None  # signals to callers that the logger did not start
        raise DataLoggerException(DataLoggerException.DL_CRITICAL_ERROR, "A critical error occur: " + str(e))

    EventLogger.info("Connection to " + self.host + ":" + str(self.port) + " established.")
    self.ipcon.set_timeout(1)  # TODO: Timeout number
    EventLogger.debug("Set ipcon.time_out to 1.")
    self._config = config
    # default CSV file name is timestamped so repeated runs do not clobber each other
    self.csv_file_name = 'logger_data_{0}.csv'.format(int(time.time()))
    self.csv_enabled = True
    self.stopped = False
def update_setup_tab(self, general_section):
    """
    Update the information of the setup tab with the given general_section.

    Copies host, port, file count/size, file paths, log level and the
    two event-log checkboxes from the config dict into the widgets.
    Any failure is logged as critical and the update is aborted.
    """
    from brickv.data_logger.configuration_validator import ConfigurationReader

    try:
        # host combo_host setEditText(String)
        self.combo_host.setEditText(general_section[ConfigurationReader.GENERAL_HOST])
        # port spinbox_port setValue(int)
        self.spinbox_port.setValue(general_section[ConfigurationReader.GENERAL_PORT])
        # file_count spin_file_count setValue(int)
        self.spin_file_count.setValue(general_section[ConfigurationReader.GENERAL_LOG_COUNT])
        # file_size spin_file_size setValue(int/1024/1024) (Byte -> MB)
        self.spin_file_size.setValue((general_section[ConfigurationReader.GENERAL_LOG_FILE_SIZE] / 1024.0 / 1024.0))
        # path_to_file line_path_to_file setText(string)
        self.line_path_to_file.setText(general_section[ConfigurationReader.GENERAL_PATH_TO_FILE])

        # logfile path
        self.line_path_to_eventfile.setText(general_section[ConfigurationReader.GENERAL_EVENTLOG_PATH])

        # loglevel: the combo box is assumed to be filled in the sorted
        # order of GUILogger._convert_level, so the index is found by
        # walking the sorted keys
        ll = general_section[ConfigurationReader.GENERAL_EVENTLOG_LEVEL]
        od = collections.OrderedDict(sorted(GUILogger._convert_level.items()))
        counter = 0
        # TODO better way to set the combo box index?
        for k in od.keys():
            if ll == k:
                break
            counter += 1
        self.combo_loglevel.setCurrentIndex(counter)

        # log_to_console
        # maps a bool onto Qt check states (2 = Qt.Checked, 0 = Qt.Unchecked)
        def __checkbox_bool_setter(bool_value):
            if bool_value:
                return 2
            else:
                return 0

        self.checkbox_to_file.setChecked(
            __checkbox_bool_setter(general_section[ConfigurationReader.GENERAL_EVENTLOG_TO_FILE]))
        # log_to_file
        self.checkbox_to_console.setCheckState(
            __checkbox_bool_setter(general_section[ConfigurationReader.GENERAL_EVENTLOG_TO_CONSOLE]))
    except Exception as e:
        EventLogger.critical("Could not read the General Section of the Config-File! -> " + str(e))
        return
def _read_json_config_file(self):
    """Parse self.fileName as UTF-8 JSON into self._configuration.

    A JSON parse error aborts the read; a missing general section is
    logged and counted in self._readConfigErr. The xively and devices
    sections fall back to defaults via prevent_key_error when absent.
    """
    with codecs.open(self.fileName, 'r', 'UTF-8') as content_file:
        try:
            json_structure = json.load(content_file)
        except ValueError as e:
            EventLogger.critical("Cant parse the configuration file: " + str(e))
            return

    # Load sections out of the json structure
    try:
        self._configuration._general = json_structure[ConfigurationReader.GENERAL_SECTION]
    except KeyError:
        EventLogger.critical("json configuration file has no [" + ConfigurationReader.GENERAL_SECTION + "] section")
        self._readConfigErr += 1

    self._configuration._xively = prevent_key_error(json_structure, ConfigurationReader.XIVELY_SECTION)
    self._configuration._devices = prevent_key_error(json_structure, ConfigurationReader.DEVICES_SECTION)
def load_and_validate_config(filename):
    """Read *filename* as JSON and run it through ConfigValidator.

    Returns the parsed config dict on success, or None when parsing or
    validation fails (parse errors are logged as critical).
    """
    EventLogger.info('Loading config from file: {0}'.format(filename))

    try:
        with open(filename, 'r') as fp:
            config = json.loads(fp.read())
    except Exception as e:
        EventLogger.critical('Could not parse config file as JSON: {0}'.format(e))
        return None

    if not ConfigValidator(config).validate():
        return None

    EventLogger.info('Config successfully loaded from: {0}'.format(filename))
    return config
def validate(self):
    """Validate every section of the JSON configuration.

    Returns True when no errors were found, False otherwise.
    """
    EventLogger.info("Validating config file")

    # each validator bumps self._error_count for every problem it finds
    self._validate_hosts()
    self._validate_data()
    self._validate_debug()
    self._validate_devices()

    errors = self._error_count
    if errors > 0:
        EventLogger.critical("Validation found {0} errors".format(errors))
    else:
        EventLogger.info("Validation successful")

    return errors == 0
def main(config_filename, gui_config, gui_job):
    """
    This function initialize the data logger and starts the logging process.

    config_filename -- path to a JSON config (console start), or None
    gui_config      -- config dict when started from the GUI
    gui_job         -- GUI job object (GUI start only)

    Returns the DataLogger on success, None on GUI-start failure; console
    starts terminate the process via sys.exit on failure.
    """
    config = None
    gui_start = False

    if config_filename != None:  # started via console
        config = load_and_validate_config(config_filename)
        if config == None:
            return None
    else:  # started via GUI
        config = gui_config
        gui_start = True

    if config['debug']['log']['enabled']:
        EventLogger.add_logger(FileLogger('FileLogger', log_level_name_to_id(config['debug']['log']['level']), config['debug']['log']['file_name']))

    data_logger = None
    try:
        data_logger = DataLogger(config, gui_job)

        if data_logger.ipcon is not None:
            data_logger.run()
            if not gui_start:
                # console mode: block until the user stops the logger
                __exit_condition(data_logger)
        else:
            raise DataLoggerException(DataLoggerException.DL_CRITICAL_ERROR, "DataLogger did not start logging process! Please check for errors.")
    except Exception as exc:
        EventLogger.critical(str(exc))
        if gui_start:
            return None
        else:
            sys.exit(DataLoggerException.DL_CRITICAL_ERROR)

    return data_logger
def load_and_validate_config(filename):
    """Load a JSON config file and validate it.

    Returns the config dict, or None when the file cannot be parsed or
    fails validation.
    """
    EventLogger.info('Loading config from file: {0}'.format(filename))

    try:
        with open(filename, 'r') as source:
            raw = source.read()
        config = json.loads(raw)
    except Exception as e:
        EventLogger.critical('Could not parse config file as JSON: {0}'.format(e))
        return None

    validator = ConfigValidator(config)
    if not validator.validate():
        return None

    EventLogger.info('Config successfully loaded from: {0}'.format(filename))
    return config
def validate(self):
    """Run all section validators; report and return the overall result.

    Returns True when the configuration is error-free.
    """
    EventLogger.info("Validating config file")

    # run every section validator; each one counts its own errors
    for check in (self._validate_hosts, self._validate_data,
                  self._validate_debug, self._validate_devices):
        check()

    if self._error_count > 0:
        EventLogger.critical("Validation found {0} errors".format(self._error_count))
    else:
        EventLogger.info("Validation successful")

    return self._error_count == 0
def main(config_filename, gui_config, gui_job):
    """
    This function initialize the data logger and starts the logging process.

    config_filename -- path to a JSON config (console start), or None
    gui_config      -- config dict when started from the GUI
    gui_job         -- GUI job object (GUI start only)

    Returns the DataLogger on success, None on GUI-start failure; console
    starts terminate the process via sys.exit on failure.
    """
    config = None
    gui_start = False

    if config_filename != None:  # started via console
        config = load_and_validate_config(config_filename)
        if config == None:
            return None
    else:  # started via GUI
        config = gui_config
        gui_start = True

    if config['debug']['log']['enabled']:
        EventLogger.add_logger(FileLogger('FileLogger', log_level_name_to_id(config['debug']['log']['level']), config['debug']['log']['file_name']))

    data_logger = None
    try:
        data_logger = DataLogger(config, gui_job)

        if data_logger.ipcon is not None:
            data_logger.run()
            if not gui_start:
                # console mode: block until the user stops the logger
                __exit_condition(data_logger)
        else:
            raise DataLoggerException(DataLoggerException.DL_CRITICAL_ERROR, "DataLogger did not start logging process! Please check for errors.")
    except Exception as exc:
        EventLogger.critical(str(exc))
        if gui_start:
            return None
        else:
            sys.exit(DataLoggerException.DL_CRITICAL_ERROR)

    return data_logger
def cb_connected(self, connect_reason):
    """IPConnection CONNECTED callback: authenticate (if a secret is
    configured) and then apply the logger options.

    connect_reason -- IPConnection.CONNECT_REASON_* constant

    On a non-ASCII secret or failed authentication the connection is
    dropped and a critical error is logged; apply_options is skipped.
    """
    if self.secret != None:
        # validate the secret is ASCII before attempting to authenticate
        try:
            secret = self.secret.encode('ascii')
        except:
            try:
                self.ipcon.disconnect()
            except:
                pass  # best effort; we are aborting anyway
            EventLogger.critical('Authentication secret cannot contain non-ASCII characters')
            return

        self.ipcon.set_auto_reconnect(False)  # don't auto-reconnect on authentication error

        try:
            self.ipcon.authenticate(secret)
        except:
            try:
                self.ipcon.disconnect()
            except:
                pass  # best effort; we are aborting anyway
            if connect_reason == IPConnection.CONNECT_REASON_AUTO_RECONNECT:
                extra = ' after auto-reconnect'
            else:
                extra = ''
            EventLogger.critical('Could not authenticate' + extra)
            return

        self.ipcon.set_auto_reconnect(True)
        EventLogger.info("Successfully authenticated")

    self.apply_options()
def _job(self):
    """Forward queued measurements to the GUI until told to stop.

    Emits each queued row via signalNewData, sleeps while idle, and once
    the exit flag is set and the queue is empty, removes this job's queue
    and exits. Any exception is logged and stops the job.
    """
    try:
        # check for datalogger object
        if AbstractJob._job(self):
            return

        EventLogger.debug(self._job_name + " Started")

        while True:
            if not self._datalogger.data_queue[self.name].empty():
                csv_data = self._get_data_from_queue()
                # hand the row to the GUI thread via a Qt signal
                self.signalNewData.emit(csv_data)

            # nothing to do yet: sleep instead of busy-waiting
            if not self._exit_flag and self._datalogger.data_queue[self.name].empty():
                time.sleep(self._datalogger.job_sleep)

            # shutdown requested and queue fully drained: finish
            if self._exit_flag and self._datalogger.data_queue[self.name].empty():
                self._remove_from_data_queue()
                break
    except Exception as e:
        EventLogger.critical(self._job_name + " -.- " + str(e))
        self.stop()
def _read_json_config_file(self):
    """Parse self.fileName as UTF-8 JSON into self._configuration.

    A JSON parse error aborts the read; a missing general section is
    logged and counted in self._readConfigErr. The xively and devices
    sections fall back to defaults via prevent_key_error when absent.
    """
    with codecs.open(self.fileName, 'r', 'UTF-8') as content_file:
        try:
            json_structure = json.load(content_file)
        except ValueError as e:
            EventLogger.critical("Cant parse the configuration file: " + str(e))
            return

    # Load sections out of the json structure
    try:
        self._configuration._general = json_structure[ConfigurationReader.GENERAL_SECTION]
    except KeyError:
        EventLogger.critical("json configuration file has no [" + ConfigurationReader.GENERAL_SECTION + "] section")
        self._readConfigErr += 1

    self._configuration._xively = prevent_key_error(json_structure, ConfigurationReader.XIVELY_SECTION)
    self._configuration._devices = prevent_key_error(json_structure, ConfigurationReader.DEVICES_SECTION)
def cb_connected(self, connect_reason):
    """IPConnection CONNECTED callback: authenticate (if a secret is
    configured) and then apply the logger options.

    connect_reason -- IPConnection.CONNECT_REASON_* constant

    On a non-ASCII secret or failed authentication the connection is
    dropped and a critical error is logged; apply_options is skipped.
    """
    if self.secret != None:
        # ASCII validation only; the encoded result is discarded and
        # self.secret itself is passed to authenticate() below.
        # NOTE(review): if self.secret were already bytes (e.g. encoded
        # in __init__), .encode would raise here and this path would
        # report a spurious non-ASCII error — confirm against the
        # paired __init__.
        try:
            self.secret.encode('ascii')
        except:
            try:
                self.ipcon.disconnect()
            except:
                pass  # best effort; we are aborting anyway
            EventLogger.critical('Authentication secret cannot contain non-ASCII characters')
            return

        self.ipcon.set_auto_reconnect(False)  # don't auto-reconnect on authentication error

        try:
            self.ipcon.authenticate(self.secret)
        except:
            try:
                self.ipcon.disconnect()
            except:
                pass  # best effort; we are aborting anyway
            if connect_reason == IPConnection.CONNECT_REASON_AUTO_RECONNECT:
                extra = ' after auto-reconnect'
            else:
                extra = ''
            EventLogger.critical('Could not authenticate' + extra)
            return

        self.ipcon.set_auto_reconnect(True)
        EventLogger.info("Successfully authenticated")

    self.apply_options()
def main(arguments_map):
    """
    This function initialize the data logger and starts the logging process.

    arguments_map -- dict of parsed start parameters; expected keys are
    CONSOLE_CONFIG_FILE (console start), GUI_CONFIG + GUI_ELEMENT (GUI
    start) and optionally CONSOLE_VALIDATE_ONLY.

    Returns the DataLogger on success, None on GUI-start failure or an
    empty configuration; console starts terminate via sys.exit on failure.
    """
    EventLogger.add_logger(ConsoleLogger("ConsoleLogger", 20))  # 20 == logging.INFO

    configuration = None
    gui_start = False

    try:
        # was started via console
        if CONSOLE_CONFIG_FILE in arguments_map and arguments_map[CONSOLE_CONFIG_FILE] is not None:
            configuration = CR(path_to_config=arguments_map[CONSOLE_CONFIG_FILE])
        # was started via gui
        elif GUI_CONFIG in arguments_map and arguments_map[GUI_CONFIG] is not None:
            gui_start = True
            configuration = CR(configuration=arguments_map[GUI_CONFIG])
        # no configuration file was given
        else:
            raise DataLoggerException(desc="Can not run data logger without a configuration.")

        # validate-only mode: CR() above already ran the validator
        if CONSOLE_VALIDATE_ONLY in arguments_map and arguments_map[CONSOLE_VALIDATE_ONLY]:
            return

        # activate eventlogger
        __manage_eventlog(configuration._configuration._general)
    except Exception as exc:
        EventLogger.critical(str(exc))
        if gui_start:
            return None
        else:
            sys.exit(DataLoggerException.DL_CRITICAL_ERROR)

    if configuration._configuration.is_empty():
        EventLogger.error("Configuration is empty")
        return None

    data_logger = None
    try:
        if gui_start:
            data_logger = DataLogger(configuration._configuration, arguments_map[GUI_ELEMENT])
        else:
            data_logger = DataLogger(configuration._configuration)

        if data_logger.ipcon is not None:
            data_logger.run()
            if not gui_start:
                # console mode: block until the user stops the logger
                __exit_condition(data_logger)
        else:
            raise DataLoggerException(DataLoggerException.DL_CRITICAL_ERROR, "DataLogger did not start logging process! Please check for errors.")
    except Exception as exc:
        EventLogger.critical(str(exc))
        if gui_start:
            return None
        else:
            sys.exit(DataLoggerException.DL_CRITICAL_ERROR)

    return data_logger
def validate_general_section(self):
    """Validate the general section of the configuration.

    Checks host, port, data-log file settings (enable flag, path, count,
    size) and event-log settings (file flag, path, console flag). Every
    problem is reported via EventLogger.critical.
    """
    global_section = self.json_config._general

    # self.CR.GENERAL_HOST ip address
    host = global_section[self.CR.GENERAL_HOST]
    if host is None or len(host) == 0:
        EventLogger.critical(
            self._generate_device_error_message(uid="",
                                                tier_array=[self.CR.GENERAL_SECTION, self.CR.GENERAL_HOST],
                                                msg="invalid host"))

    # self.CR.GENERAL_PORT port number
    port = global_section[self.CR.GENERAL_PORT]
    if not Utilities.is_valid_string(port, 1) and not (0 < port <= 65535):
        EventLogger.critical(
            self._generate_device_error_message(uid="",
                                                tier_array=[self.CR.GENERAL_SECTION, self.CR.GENERAL_PORT],
                                                msg="port should be an integer 0-65535"))

    # --- Datalog file ---------------------------------------------
    # self.CR.GENERAL_LOG_TO_FILE should be a bool and if its True then
    # self.CR.GENERAL_PATH_TO_FILE should be a string and a valid path
    if not type(global_section[self.CR.GENERAL_LOG_TO_FILE]) == bool:
        EventLogger.critical(
            self._generate_device_error_message(uid="",
                                                tier_array=[self.CR.GENERAL_SECTION, self.CR.GENERAL_LOG_TO_FILE],
                                                msg="should be a boolean"))
    else:
        if global_section[self.CR.GENERAL_LOG_TO_FILE]:
            if not Utilities.check_file_path_exists(global_section[self.CR.GENERAL_PATH_TO_FILE]):
                EventLogger.critical(
                    self._generate_device_error_message(uid="",
                                                        tier_array=[self.CR.GENERAL_SECTION, self.CR.GENERAL_PATH_TO_FILE],
                                                        msg="path is not reachable"))

    # self.CR.GENERAL_PATH_TO_FILE
    if not Utilities.is_valid_string(global_section[self.CR.GENERAL_PATH_TO_FILE], 1):
        EventLogger.critical(
            self._generate_device_error_message(uid="",
                                                tier_array=[self.CR.GENERAL_SECTION, self.CR.GENERAL_PATH_TO_FILE],
                                                msg="should be a path to the file where the data will be saved"))

    # self.CR.GENERAL_LOG_COUNT and GENERAL_LOG_FILE_SIZE
    count = global_section[self.CR.GENERAL_LOG_COUNT]
    if not isinstance(count, int) and (not isinstance(count, float)):
        EventLogger.critical(
            self._generate_device_error_message(uid="",
                                                tier_array=[self.CR.GENERAL_SECTION, self.CR.GENERAL_LOG_COUNT],
                                                msg="should be a int or float"))

    size = global_section[self.CR.GENERAL_LOG_FILE_SIZE]
    if not isinstance(size, int) and (not isinstance(size, float)):
        EventLogger.critical(
            self._generate_device_error_message(uid="",
                                                tier_array=[self.CR.GENERAL_SECTION, self.CR.GENERAL_LOG_FILE_SIZE],
                                                msg="should be a int or float"))

    # --- Eventlog file ---------------------------------------------
    # self.CR.GENERAL_EVENTLOG_TO_FILE should be a bool and if its True then
    # self.CR.GENERAL_EVENTLOG_PATH should be a string and a valid path
    if not type(global_section[self.CR.GENERAL_EVENTLOG_TO_FILE]) == bool:
        EventLogger.critical(
            self._generate_device_error_message(uid="",
                                                tier_array=[self.CR.GENERAL_SECTION, self.CR.GENERAL_EVENTLOG_TO_FILE],
                                                msg="should be a boolean"))
    else:
        if global_section[self.CR.GENERAL_EVENTLOG_TO_FILE]:
            if not Utilities.is_valid_string(global_section[self.CR.GENERAL_EVENTLOG_PATH], 1):
                EventLogger.critical(
                    self._generate_device_error_message(uid="",
                                                        tier_array=[self.CR.GENERAL_SECTION, self.CR.GENERAL_EVENTLOG_PATH],
                                                        msg="should be a path to the event file"))
            else:
                if not Utilities.check_file_path_exists(global_section[self.CR.GENERAL_EVENTLOG_PATH]):
                    EventLogger.critical(
                        self._generate_device_error_message(uid="",
                                                            tier_array=[self.CR.GENERAL_SECTION, self.CR.GENERAL_EVENTLOG_PATH],
                                                            msg="path is not reachable"))

    if not type(global_section[self.CR.GENERAL_EVENTLOG_TO_CONSOLE]) == bool:
        EventLogger.critical(
            self._generate_device_error_message(uid="",
                                                tier_array=[self.CR.GENERAL_SECTION, self.CR.GENERAL_EVENTLOG_TO_CONSOLE],
                                                msg="should be a boolean"))
def validate_devices_section(self):
    """
    This function validates the devices out of the configuration file.

    For every configured device: checks the device name against the
    known device definitions, validates the UID, and validates each
    value's interval and subvalue flags. Errors are reported via
    EventLogger.critical. Also feeds the log-space counter with an
    estimate of logged lines per second.
    """
    device_definitions = Idf.DEVICE_DEFINITIONS
    for device in self.json_config._devices:
        # name
        # NOTE(review): if DEVICE_DEFINITIONS is a plain dict, an unknown
        # name raises KeyError here and the None check below never
        # fires — confirm the container's lookup semantics
        blueprint = device_definitions[device[Idf.DD_NAME]]
        if blueprint is None:
            EventLogger.critical(
                self._generate_device_error_message(uid=device[Idf.DD_UID],
                                                    tier_array=["general"],
                                                    msg="no such device available"))
            continue  # next device

        # uid: must be a string of at least 3 chars and not the placeholder default
        if not Utilities.is_valid_string(device[Idf.DD_UID], 3) or device[Idf.DD_UID] == Idf.DD_UID_DEFAULT:
            EventLogger.critical(
                self._generate_device_error_message(uid=device[Idf.DD_UID],
                                                    tier_array=["general"],
                                                    msg="the uid from '" + device[Idf.DD_NAME] + "' is invalid"))

        device_values = device[Idf.DD_VALUES]
        blueprint_values = blueprint[Idf.DD_VALUES]
        # values
        for device_value in device_values:
            logged_values = 0
            if device_value not in blueprint_values:
                EventLogger.critical(
                    self._generate_device_error_message(uid=device[Idf.DD_UID],
                                                        tier_array=["values"],
                                                        msg="invalid value " + str(device_value)))
            else:
                # interval
                interval = device_values[device_value][Idf.DD_VALUES_INTERVAL]
                if not self._is_valid_interval(interval):
                    EventLogger.critical(
                        self._generate_device_error_message(uid=device[Idf.DD_UID],
                                                            tier_array=["values"],
                                                            msg="invalid interval " + str(interval)))
                # subvalue
                try:
                    subvalues = device_values[device_value][Idf.DD_SUBVALUES]
                    for value in subvalues:
                        if not type(subvalues[value]) == bool:  # type check for validation
                            EventLogger.critical(
                                self._generate_device_error_message(uid=device[Idf.DD_UID],
                                                                    tier_array=["values"],
                                                                    msg="invalid type " + str(value)))
                        else:
                            if subvalues[value]:  # value check for "lines per second" calculation
                                logged_values += 1
                except KeyError:
                    # value has no subvalues; it counts as a single logged value
                    if interval > 0:  # just one value to log
                        logged_values += 1

                if interval > 0:
                    self._log_space_counter.add_lines_per_second(interval / 1000 * logged_values)
def validate_general_section(self):
    """
    Validate the general section of the JSON configuration file.

    Checks the brickd host and port, the data-log file settings (enable
    flag, target path, rotation count and file size) and the event-log
    settings (file enable flag, event file path, console flag).  Every
    problem is reported via EventLogger.critical(); nothing is raised.
    """
    global_section = self.json_config._general

    # self.CR.GENERAL_HOST ip address
    host = global_section[self.CR.GENERAL_HOST]
    if host is None or len(host) == 0:
        EventLogger.critical(
            self._generate_device_error_message(
                uid="",
                tier_array=[self.CR.GENERAL_SECTION, self.CR.GENERAL_HOST],
                msg="invalid host"))

    # self.CR.GENERAL_PORT port number
    port = global_section[self.CR.GENERAL_PORT]
    if not Utilities.is_valid_string(port, 1) and not (0 < port <= 65535):
        EventLogger.critical(
            self._generate_device_error_message(
                uid="",
                tier_array=[self.CR.GENERAL_SECTION, self.CR.GENERAL_PORT],
                msg="port should be an integer 0-65535"))

    # --- Datalog file ---------------------------------------------
    # self.CR.GENERAL_LOG_TO_FILE should be a bool and if its True then
    # self.CR.GENERAL_LOG_TO_FILE should be a string and a valid path
    if not isinstance(global_section[self.CR.GENERAL_LOG_TO_FILE], bool):
        EventLogger.critical(
            self._generate_device_error_message(
                uid="",
                tier_array=[self.CR.GENERAL_SECTION,
                            self.CR.GENERAL_LOG_TO_FILE],
                msg="should be a boolean"))
    else:
        if global_section[self.CR.GENERAL_LOG_TO_FILE]:
            if not Utilities.check_file_path_exists(
                    global_section[self.CR.GENERAL_PATH_TO_FILE]):
                EventLogger.critical(
                    self._generate_device_error_message(
                        uid="",
                        tier_array=[self.CR.GENERAL_SECTION,
                                    self.CR.GENERAL_PATH_TO_FILE],
                        msg="path is not reachable"))

    # self.CR.GENERAL_PATH_TO_FILE
    if not Utilities.is_valid_string(
            global_section[self.CR.GENERAL_PATH_TO_FILE], 1):
        EventLogger.critical(
            self._generate_device_error_message(
                uid="",
                tier_array=[self.CR.GENERAL_SECTION,
                            self.CR.GENERAL_PATH_TO_FILE],
                msg="should be a path to the file where the data will be saved"))

    # self.CR.GENERAL_LOG_COUNT and GENERAL_LOG_FILE_SIZE
    count = global_section[self.CR.GENERAL_LOG_COUNT]
    if not isinstance(count, (int, float)):
        EventLogger.critical(
            self._generate_device_error_message(
                uid="",
                tier_array=[self.CR.GENERAL_SECTION,
                            self.CR.GENERAL_LOG_COUNT],
                msg="should be a int or float"))

    size = global_section[self.CR.GENERAL_LOG_FILE_SIZE]
    if not isinstance(size, (int, float)):
        EventLogger.critical(
            self._generate_device_error_message(
                uid="",
                tier_array=[self.CR.GENERAL_SECTION,
                            self.CR.GENERAL_LOG_FILE_SIZE],
                msg="should be a int or float"))

    # --- Eventlog file ---------------------------------------------
    # self.CR.GENERAL_EVENTLOG_TO_FILE should be a bool and if its True then
    # self.CR.GENERAL_EVENTLOG_PATH should be a string and a valid path
    if not isinstance(global_section[self.CR.GENERAL_EVENTLOG_TO_FILE], bool):
        EventLogger.critical(
            self._generate_device_error_message(
                uid="",
                tier_array=[self.CR.GENERAL_SECTION,
                            self.CR.GENERAL_EVENTLOG_TO_FILE],
                msg="should be a boolean"))
    else:
        if global_section[self.CR.GENERAL_EVENTLOG_TO_FILE]:
            if not Utilities.is_valid_string(
                    global_section[self.CR.GENERAL_EVENTLOG_PATH], 1):
                EventLogger.critical(
                    self._generate_device_error_message(
                        uid="",
                        tier_array=[self.CR.GENERAL_SECTION,
                                    self.CR.GENERAL_EVENTLOG_PATH],
                        msg="should be a path to the event file"))
            else:
                if not Utilities.check_file_path_exists(
                        global_section[self.CR.GENERAL_EVENTLOG_PATH]):
                    EventLogger.critical(
                        self._generate_device_error_message(
                            uid="",
                            tier_array=[self.CR.GENERAL_SECTION,
                                        self.CR.GENERAL_EVENTLOG_PATH],
                            msg="path is not reachable"))

    if not isinstance(
            global_section[self.CR.GENERAL_EVENTLOG_TO_CONSOLE], bool):
        EventLogger.critical(
            self._generate_device_error_message(
                uid="",
                tier_array=[self.CR.GENERAL_SECTION,
                            self.CR.GENERAL_EVENTLOG_TO_CONSOLE],
                msg="should be a boolean"))
def _report_error(self, message):
    """Record one validation error and log *message* as critical."""
    # Bump the counter so validate() can report a total and fail the run.
    self._error_count = self._error_count + 1
    EventLogger.critical(message)
def _report_error(self, message):
    """Log *message* as a critical event and count it as a validation error."""
    EventLogger.critical(message)
    # Track the running total; validate() uses it to decide success/failure.
    self._error_count += 1