def init_config(self, repo_dir):
    """ Reads the agent's JSON configuration from repo_dir and initializes
    all runtime attributes - filesystem paths, TLS material, logging,
    HAProxy helpers and the host/port the agent will serve on.
    """
    self.repo_dir = os.path.abspath(repo_dir)
    self.json_config = get_lb_agent_json_config(self.repo_dir)

    # All paths in the JSON config are relative to the repo directory
    self.work_dir = os.path.abspath(os.path.join(self.repo_dir, self.json_config['work_dir']))
    self.haproxy_command = self.json_config['haproxy_command']
    self.verify_fields = self.json_config['verify_fields']

    # TLS material used by the agent's server
    self.keyfile = os.path.abspath(os.path.join(self.repo_dir, self.json_config['keyfile']))
    self.certfile = os.path.abspath(os.path.join(self.repo_dir, self.json_config['certfile']))
    self.ca_certs = os.path.abspath(os.path.join(self.repo_dir, self.json_config['ca_certs']))

    # HAProxy's pidfile is kept two directories above the repo directory
    self.haproxy_pidfile = os.path.abspath(os.path.join(self.repo_dir, '../', '../', MISC.PIDFILE))

    log_config = os.path.abspath(os.path.join(self.repo_dir, self.json_config['log_config']))
    with open(log_config) as f:
        # safe_load instead of load - the logging configuration is plain
        # YAML data and yaml.load without an explicit Loader is both
        # deprecated (PyYAML >= 5.1) and unsafe on arbitrary input.
        logging.config.dictConfig(yaml.safe_load(f))

    # config_file is a module-level constant - TODO confirm it is defined
    # at file scope, it is not visible in this chunk.
    self.config_path = os.path.join(self.repo_dir, config_file)
    self.config = self._read_config()
    self.start_time = datetime.utcnow().replace(tzinfo=UTC).isoformat()
    self.haproxy_stats = HAProxyStats(self.config.global_['stats_socket'])

    # Make sure the config repository is in a usable state before serving
    RepoManager(self.repo_dir).ensure_repo_consistency()

    self.host = self.json_config['host']
    self.port = self.json_config['port']
    self.logger = logging.getLogger(self.__class__.__name__)
def __init__(self, repo_dir):
    """ Reads the agent's JSON configuration from repo_dir, initializes all
    runtime attributes and invokes the base class's __init__ with the TLS
    parameters needed to start serving (client certificates required).
    """
    self.repo_dir = os.path.abspath(repo_dir)
    self.json_config = get_lb_agent_json_config(self.repo_dir)

    # All paths in the JSON config are relative to the repo directory
    self.work_dir = os.path.abspath(os.path.join(self.repo_dir, self.json_config['work_dir']))
    self.haproxy_command = self.json_config['haproxy_command']
    self.verify_fields = self.json_config['verify_fields']

    # TLS material used by the agent's server
    self.keyfile = os.path.abspath(os.path.join(self.repo_dir, self.json_config['keyfile']))
    self.certfile = os.path.abspath(os.path.join(self.repo_dir, self.json_config['certfile']))
    self.ca_certs = os.path.abspath(os.path.join(self.repo_dir, self.json_config['ca_certs']))

    # HAProxy's pidfile is kept two directories above the repo directory
    self.haproxy_pidfile = os.path.abspath(os.path.join(self.repo_dir, '../', '../', MISC.PIDFILE))

    log_config = os.path.abspath(os.path.join(self.repo_dir, self.json_config['log_config']))
    with open(log_config) as f:
        # safe_load instead of load - the logging configuration is plain
        # YAML data and yaml.load without an explicit Loader is both
        # deprecated (PyYAML >= 5.1) and unsafe on arbitrary input.
        logging.config.dictConfig(yaml.safe_load(f))

    # config_file is a module-level constant - TODO confirm it is defined
    # at file scope, it is not visible in this chunk.
    self.config_path = os.path.join(self.repo_dir, config_file)
    self.config = self._read_config()
    self.start_time = datetime.utcnow().replace(tzinfo=UTC).isoformat()
    self.haproxy_stats = HAProxyStats(self.config.global_['stats_socket'])

    # Make sure the config repository is in a usable state before serving
    RepoManager(self.repo_dir).ensure_repo_consistency()

    super(LoadBalancerAgent, self).__init__(
        host=self.json_config['host'],
        port=self.json_config['port'],
        keyfile=self.keyfile,
        certfile=self.certfile,
        ca_certs=self.ca_certs,
        cert_reqs=ssl.CERT_REQUIRED,
        verify_fields=self.verify_fields)
from __future__ import absolute_import, division, print_function, unicode_literals # stdlib import os, sys # ConcurrentLogHandler - updates stlidb's logging config on import so this needs to stay import cloghandler cloghandler = cloghandler # For pyflakes # Zato from zato.agent.load_balancer.server import LoadBalancerAgent, TLSLoadBalancerAgent from zato.common.util import get_lb_agent_json_config, parse_cmd_line_options, store_pidfile if __name__ == '__main__': repo_dir = sys.argv[1] component_dir = os.path.join(repo_dir, '..', '..') lba_config = get_lb_agent_json_config(repo_dir) # Store agent's pidfile only if we are not running in foreground options = parse_cmd_line_options(sys.argv[2]) if not options.get('fg', None): store_pidfile(component_dir, lba_config['pid_file']) lba_class = TLSLoadBalancerAgent if lba_config.get( 'is_tls_enabled', True) else LoadBalancerAgent lba = lba_class(repo_dir) lba.start_load_balancer() lba.serve_forever()
Licensed under LGPLv3, see LICENSE.txt for terms and conditions. """ from __future__ import absolute_import, division, print_function, unicode_literals # stdlib import os, sys # ConcurrentLogHandler - updates stlidb's logging config on import so this needs to stay import cloghandler cloghandler = cloghandler # For pyflakes # Zato from zato.agent.load_balancer.server import LoadBalancerAgent from zato.common.util import get_lb_agent_json_config, parse_cmd_line_options, store_pidfile if __name__ == '__main__': repo_dir = sys.argv[1] component_dir = os.path.join(repo_dir, '..', '..') # Store agent's pidfile only if we are not running in foreground options = parse_cmd_line_options(sys.argv[2]) if not options.get('fg', None): store_pidfile(component_dir, get_lb_agent_json_config(repo_dir)['pid_file']) lba = LoadBalancerAgent(repo_dir) lba.start_load_balancer() lba.serve_forever()
Copyright (C) 2018, Zato Source s.r.o. https://zato.io Licensed under LGPLv3, see LICENSE.txt for terms and conditions. """ from __future__ import absolute_import, division, print_function, unicode_literals # stdlib import os, sys # ConcurrentLogHandler - updates stlidb's logging config on import so this needs to stay import cloghandler cloghandler = cloghandler # For pyflakes # Zato from zato.agent.load_balancer.server import LoadBalancerAgent from zato.common.util import get_lb_agent_json_config, parse_cmd_line_options, store_pidfile if __name__ == '__main__': repo_dir = sys.argv[1] component_dir = os.path.join(repo_dir, '..', '..') # Store agent's pidfile only if we are not running in foreground options = parse_cmd_line_options(sys.argv[2]) if not options.get('fg', None): store_pidfile(component_dir, get_lb_agent_json_config(repo_dir)['pid_file']) lba = LoadBalancerAgent(repo_dir) lba.start_load_balancer() lba.serve_forever()