def __init__(self):
    """Load client TLS credential paths from /etc/vc3/vc3-client.conf.

    Reads the ``netcomm`` section and expands ``~`` in the certfile,
    keyfile and chainfile paths.
    """
    self.log = de_logger.get_logger()
    self.config = SafeConfigParser()
    # Use a context manager so the config file handle is closed;
    # the original passed a bare open() to readfp and leaked it.
    with open('/etc/vc3/vc3-client.conf') as conf_fp:
        self.config.readfp(conf_fp)
    self.certfile = os.path.expanduser(self.config.get('netcomm', 'certfile'))
    self.keyfile = os.path.expanduser(self.config.get('netcomm', 'keyfile'))
    self.chainfile = os.path.expanduser(self.config.get('netcomm', 'chainfile'))
def __init__(self, cfg):
    """Build the named facts and the rule engine from a channel config.

    :type cfg: :obj:`dict`
    :arg cfg: channel configuration; must contain "facts" and "rules"
    """
    super(LogicEngine, self).__init__(cfg)
    self.logger = de_logger.get_logger()
    # dict.iteritems() is Python 2 only and raises AttributeError on
    # Python 3; items() behaves identically for this use on both.
    self.facts = [NamedFact(name, expr)
                  for name, expr in cfg["facts"].items()]
    # Only the names of facts are really needed.  We pass in the
    # JSON form of the whole facts dictionary until the C++ is
    # updated to take a list of strings.
    self.re = RuleEngine(json.dumps(cfg["facts"]), json.dumps(cfg["rules"]))
def _make_logger(global_config):
    """Configure and return the decision-engine logger.

    :type global_config: :obj:`dict`
    :arg global_config: must contain a 'logger' section with 'log_file',
        'max_file_size', 'max_backup_count' and optional 'log_level'
    :raises RuntimeError: if the 'logger' section is missing or setup fails
    """
    if 'logger' not in global_config:
        raise RuntimeError("No logger configuration has been specified.")
    try:
        logger_config = global_config['logger']
        de_logger.set_logging(
            log_file_name=logger_config['log_file'],
            max_file_size=logger_config['max_file_size'],
            max_backup_count=logger_config['max_backup_count'],
            log_level=logger_config.get('log_level', 'WARNING'))
        return de_logger.get_logger()
    except Exception as msg:
        # Chain the original exception so the root cause stays visible.
        raise RuntimeError(f"Failed to create log: {msg}") from msg
def __init__(self, *args, **kwargs):
    """Configure the source proxy from the channel config dict (args[0]).

    Every key listed in the module-level ``must_have`` set is required;
    a missing key raises RuntimeError.
    """
    config = args[0]
    if not set(must_have).issubset(set(config.keys())):
        raise RuntimeError(
            'SourceProxy misconfigured. Must have %s defined' % (must_have, ))
    self.source_channel = config['channel_name']
    self.data_keys = config['Dataproducts']
    self.retries = config.get('retries', RETRIES)
    self.retry_to = config.get('retry_timeout', RETRY_TO)
    self.logger = de_logger.get_logger()
    cfg_manager = configmanager.ConfigManager()
    cfg_manager.load()
    self.dataspace = dataspace.DataSpace(cfg_manager.get_global_config())
def __init__(self, cfg):
    """Build facts and the rule engine for the fake-condor logic engine.

    :type cfg: :obj:`dict`
    :arg cfg: channel configuration; must contain "facts" and "rules"
    """
    super(FakeCondorLE, self).__init__(cfg)
    # dict.iteritems() is Python 2 only and raises AttributeError on
    # Python 3; items() behaves identically for this use on both.
    self.facts = [NamedFact(name, expr)
                  for name, expr in cfg["facts"].items()]
    # Only the names of facts are really needed.  We pass in the
    # JSON form of the whole facts dictionary until the C++ is
    # updated to take a list of strings.
    self.re = RuleEngine(json.dumps(cfg["facts"]), json.dumps(cfg["rules"]))
    self.log = de_logger.get_logger()
    # Lazy %-args: the message is only formatted if DEBUG is enabled.
    self.log.debug('>>> __init__ completed with input cfg = %s', cfg)
def _make_de_logger(global_config):
    """Set up decision-engine logging from global_config['logger'].

    :type global_config: :obj:`dict`
    :arg global_config: must contain a 'logger' section with at least
        'log_file'; all other settings have defaults
    :raises RuntimeError: if the 'logger' section is missing or setup fails
    """
    if 'logger' not in global_config:
        raise RuntimeError("No logger configuration has been specified.")
    try:
        logger_config = global_config['logger']
        de_logger.set_logging(log_level=logger_config.get('log_level', 'INFO'),
                              file_rotate_by=logger_config.get('file_rotate_by', "size"),
                              rotation_time_unit=logger_config.get('rotation_time_unit', 'D'),
                              rotation_interval=logger_config.get('rotation_time_interval', 1),
                              max_backup_count=logger_config.get('max_backup_count', 6),
                              max_file_size=logger_config.get('max_file_size', 1000000),
                              log_file_name=logger_config['log_file'])
        return de_logger.get_logger()
    except Exception as msg:  # pragma: no cover
        # Chain the original exception so the root cause stays visible.
        raise RuntimeError(f"Failed to create log: {msg}") from msg
def __init__(self, *args, **kwargs):
    """Create the HTCondor schedd connection and the module logger."""
    # Dead commented-out config handling ("#k" lines) removed.
    self.logger = de_logger.get_logger()
    self.schedd = htcondorlib.HTCondorSchedd()
    self.logger.info('__init__ completed')

def produces(self):
    """
    Return list of items produced
    """
    self.logger.debug('>>> starting produces()')
    return PRODUCES
def _make_de_logger(global_config):
    """Configure decision-engine logging from global_config["logger"]
    and return the resulting logger.

    Raises RuntimeError when the "logger" section is absent or when
    logger construction fails.
    """
    if "logger" not in global_config:
        raise RuntimeError("No logger configuration has been specified.")
    try:
        cfg = global_config["logger"]
        # Collect all settings first, then configure in one call.
        options = dict(
            log_level=cfg.get("log_level", "INFO"),
            file_rotate_by=cfg.get("file_rotate_by", "size"),
            rotation_time_unit=cfg.get("rotation_time_unit", "D"),
            rotation_interval=cfg.get("rotation_time_interval", 1),
            max_backup_count=cfg.get("max_backup_count", 6),
            max_file_size=cfg.get("max_file_size", 1000000),
            log_file_name=cfg["log_file"],
            start_q_logger=cfg.get("start_q_logger", "True"),
        )
        de_logger.configure_logging(**options)
        return de_logger.get_logger()
    except Exception as msg:  # pragma: no cover
        raise RuntimeError(f"Failed to create log: {msg}")
def __init__(self, name, task_manager_id, generation_id, channel_dict, global_config):
    """Create a task manager with its own data block and channel.

    :type name: :obj:`str`
    :arg name: task manager name
    :type task_manager_id: :obj:`int`
    :arg task_manager_id: Task Manager id provided by caller
    :type generation_id: :obj:`int`
    :arg generation_id: generation id used for the initial data block
    :type channel_dict: :obj:`dict`
    :arg channel_dict: channel configuration
    :type global_config: :obj:`dict`
    :arg global_config: configuration handed to the dataspace
    """
    self.name = name
    self.id = task_manager_id
    self.dataspace = dataspace.DataSpace(global_config)
    # my current ("t0") data block
    self.data_block_t0 = datablock.DataBlock(self.dataspace,
                                             name,
                                             task_manager_id,
                                             generation_id)
    self.channel = Channel(channel_dict)
    self.state = multiprocessing.Value('i', BOOT)
    self.decision_cycle_active = False
    self.lock = threading.Lock()
    self.logger = de_logger.get_logger()
    self.stop = False  # stop running all loops when this is True
def __init__(self, param_dict):
    """Set up the module logger; param_dict is accepted but unused here."""
    log = de_logger.get_logger()
    self.logger = log
    log.info('__init__ completed')
def __init__(self, *args, **kwargs):
    """Accept arbitrary config arguments and set up the module logger."""
    log = de_logger.get_logger()
    self.logger = log
    log.info('>>> __init__ completed')
def load(self):
    """Load the global config file and all channel configs.

    Reads self.config_file (a Python-literal file) via exec into
    self.global_config, creates the logger on first load, then loads
    every *.conf file in self.channel_config_dir into self.channels
    and validates each one.
    """
    # Remember the file's mtime so later calls can detect edits.
    self.last_update_time = os.stat(self.config_file).st_mtime
    code = None
    try:
        with open(self.config_file, "r") as f:
            # The file's contents are a Python expression; exec of this
            # assignment binds it to self.global_config.
            # NOTE(review): exec on file contents — assumes the config
            # file is trusted local input.
            code = "self.global_config=" + "".join(f.readlines())
        if code:
            try:
                exec(code)
            except Exception as msg:
                raise RuntimeError("Configuration file {} contains errors: {}".
                                   format(self.config_file, msg))
        else:
            raise RuntimeError(
                "Empty configuration file {}".format(self.config_file))
    except Exception as msg:
        # Any failure above (open, exec, empty file) is reported the same way.
        raise RuntimeError("Failed to read configuration file {} {}".
                           format(self.config_file, msg))
    # Create the logger once, from the just-loaded global config.
    if not self.logger:
        try:
            logger_config = self.global_config['logger']
            de_logger.set_logging(log_file_name=logger_config['log_file'],
                                  max_file_size=logger_config['max_file_size'],
                                  max_backup_count=logger_config['max_backup_count'])
            self.logger = de_logger.get_logger()
        except Exception as msg:
            raise RuntimeError("Failed to create log: {}".format(msg))
    """ load channels """
    for direntry in os.listdir(self.channel_config_dir):
        # Only *.conf files are channel configurations.
        if not direntry.endswith(".conf"):
            continue
        name = direntry.split('.')[0]
        channel_conf = os.path.join(self.channel_config_dir, direntry)
        try:
            with open(os.path.abspath(channel_conf), "r") as f:
                # exec binds the file's expression into self.channels[name]
                # ('name' is picked up from this local scope).
                code = "self.channels[name]=" + "".join(f.readlines())
                try:
                    exec(code)
                except Exception as msg:
                    self.logger.error("Channel configuration file {} \
contains error {}, SKIPPING".
                                      format(channel_conf, msg))
                    continue
        except Exception as msg:
            # NOTE(review): open failure logs but does not 'continue';
            # validation below may then see a stale/missing entry — confirm
            # whether this fall-through is intended.
            self.logger.error("Failed to open channel configuration file {} \
contains error {}, SKIPPING".
                              format(channel_conf, msg))
        """
        check that channel configuration contains necessary keys
        if keys are missing channel is removed and error is printed
        """
        try:
            self.validate_channel(self.channels[name])
        except Exception as msg:
            self.logger.error(
                "{} {}, REMOVING the channel".format(name, msg))
            del self.channels[name]
            continue
exec(code) except Exception, msg: raise RuntimeError("Configuration file {} contains errors: {}". format(self.config_file, str(msg))) else: raise RuntimeError("Empty configuration file {}".format(self.config_file)) except Exception, msg: raise RuntimeError("Failed to read configuration file {} {}". format(self.config_file, str(msg))) if not self.logger: try: de_logger.set_logging(log_file_name=self.global_config['logger']['log_file'], max_file_size=self.global_config['logger']['max_file_size'], max_backup_count=self.global_config['logger']['max_backup_count']) self.logger = de_logger.get_logger() except Exception, msg: raise RuntimeError("Failed to create log: {}".format(str(msg))) """ load channels """ for direntry in os.listdir(self.channel_config_dir): if not direntry.endswith(".conf"): continue name = direntry.split('.')[0] channel_conf = os.path.join(self.channel_config_dir, direntry) try: with open(os.path.abspath(channel_conf), "r") as f: