def start_metrics_component(self):
    """Launch the metrics component as a daemonized child process.

    The component is handed the etcd endpoint from the command line, the
    queue it shares with the scaling engine, and the shared exit flag
    used for cooperative shutdown.
    """
    self._dynamite_metrics = DynamiteMETRICS(
        self._command_line_arguments.etcd_endpoint,
        self._scaling_engine_metrics_communication_queue,
        self._exit_flag,
    )
    metrics_process = self._dynamite_metrics
    # Daemonize so this child cannot keep the parent process alive.
    metrics_process.daemon = True
    metrics_process.start()
class Dynamite:
    """Main entry point of dynamite.

    Wires together the worker components (executor, metrics collector and
    scaling engine), connects them via queues or RabbitMQ, and coordinates
    cooperative shutdown through a shared exit flag.
    """

    # Runtime environment, taken from $DYNAMITE_ENVIRONMENT (defaults to development).
    DYNAMITE_ENVIRONMENT = os.getenv('DYNAMITE_ENVIRONMENT', DYNAMITE_ENVIRONMENT_STRUCT.DEVELOPMENT)
    # Both defaults are filled in platform-dependently by init_env().
    DEFAULT_CONFIG_PATH = None
    DEFAULT_SERVICE_FOLDER = None
    DEFAULT_ETCD_ENDPOINT = "127.0.0.1:4001"

    _command_line_arguments = None
    _message_sender_receiver_factory = None
    _scaling_engine_metrics_communication_queue = None
    _exit_flag = None

    def __init__(self):
        self._logger = logging.getLogger("dynamite.Dynamite")
        self._logger.setLevel(logging.INFO)
        self._scaling_engine_metrics_communication_queue = None
        # Shared integer flag; set to 1 to ask the child processes to stop.
        self._exit_flag = Value('i', 0)

    def run(self):
        """Run the full startup sequence, then shut all components down."""
        self._logger.info("Started dynamite")
        self.init_env()
        self.init_arguments()
        config = self.parse_config()
        self.create_communication_queues()
        try:
            self.start_executor()
            self.start_metrics_component()
            self.start_scaling_engine(config)
        finally:
            self._exit_flag.value = 1
            # BUGFIX: join only the processes that were actually created.
            # Previously the joins were unconditional, so a failure in
            # start_executor() made self._dynamite_metrics.join() raise
            # AttributeError inside the finally block, masking the
            # original exception.
            metrics_process = getattr(self, '_dynamite_metrics', None)
            if metrics_process is not None:
                metrics_process.join()
            executor_process = getattr(self, '_dynamite_executor', None)
            if executor_process is not None:
                executor_process.join()

    def init_env(self):
        """Choose platform- and environment-specific default paths."""
        if platform.system() == 'Windows':
            self._logger.debug("Platform is Windows")
            self._logger.debug("Environment is %s", str(self.DYNAMITE_ENVIRONMENT))
            if self.DYNAMITE_ENVIRONMENT == DYNAMITE_ENVIRONMENT_STRUCT.PRODUCTION:
                self.DEFAULT_CONFIG_PATH = 'C:\\Program Files\\Dynamite\\config.yaml'
                self.DEFAULT_SERVICE_FOLDER = 'C:\\Program Files\\Dynamite\\Service-Files'
            elif self.DYNAMITE_ENVIRONMENT == DYNAMITE_ENVIRONMENT_STRUCT.DEVELOPMENT:
                base_dir = os.path.dirname(os.path.realpath(__file__))
                self.DEFAULT_CONFIG_PATH = base_dir + '\\tests\\TEST_CONFIG_FOLDER\\config.yaml'
                self.DEFAULT_SERVICE_FOLDER = base_dir + '\\tests\\TEST_CONFIG_FOLDER\\service-files'
        elif platform.system() in ('Linux', 'Darwin'):
            self._logger.debug("Platform is " + platform.system())
            self._logger.debug("Environment is %s", str(self.DYNAMITE_ENVIRONMENT))
            if self.DYNAMITE_ENVIRONMENT == DYNAMITE_ENVIRONMENT_STRUCT.PRODUCTION:
                self.DEFAULT_CONFIG_PATH = '/etc/dynamite/config.yaml'
                self.DEFAULT_SERVICE_FOLDER = '/etc/dynamite/service-files'
            elif self.DYNAMITE_ENVIRONMENT == DYNAMITE_ENVIRONMENT_STRUCT.DEVELOPMENT:
                base_dir = os.path.dirname(os.path.realpath(__file__))
                self.DEFAULT_CONFIG_PATH = base_dir + '/tests/TEST_CONFIG_FOLDER/config.yaml'
                self.DEFAULT_SERVICE_FOLDER = base_dir + '/tests/TEST_CONFIG_FOLDER/service-files'
        # NOTE: on an unrecognized platform/environment both defaults stay
        # None; init_arguments() will then fail building its help strings.
        self._logger.info("Default config path is %s", self.DEFAULT_CONFIG_PATH)
        self._logger.info("Default service folder path is %s", self.DEFAULT_SERVICE_FOLDER)

    def init_arguments(self):
        """Parse command-line arguments and validate the referenced paths.

        Raises:
            FileNotFoundError: the given config file does not exist.
            NotADirectoryError: a given service folder is not a directory.
        """
        parser = argparse.ArgumentParser()
        parser.add_argument("--config_file", "-c",
                            help="Define Config-File to be used. Default: " + self.DEFAULT_CONFIG_PATH,
                            nargs='?',
                            default=self.DEFAULT_CONFIG_PATH)
        parser.add_argument("--service_folder", "-s",
                            help="Define Folder(s) in which dynamite searches for service-files (fleet). Default: "
                                 + self.DEFAULT_SERVICE_FOLDER + ". Can be provided multiple times",
                            nargs='?',
                            action='append')
        parser.add_argument("--etcd_endpoint", "-e",
                            help="Define ETCD Endpoint [IP]:[PORT]. Default: " + self.DEFAULT_ETCD_ENDPOINT,
                            nargs='?',
                            default=self.DEFAULT_ETCD_ENDPOINT)
        parser.add_argument("--rabbitmq_endpoint", "-r",
                            help="Define Rabbit-MQ Endpoint [IP]:[PORT].",
                            nargs='?',
                            default=None)
        parser.add_argument("--fleet_endpoint", "-f",
                            help="Define Fleet Endpoint [IP]:[PORT].",
                            nargs='?',
                            default="127.0.0.1:49153")
        args = parser.parse_args()
        self._command_line_arguments = CommandLineArguments(args)
        self._command_line_arguments.log_arguments()
        # Test if Config-File exists. If not, terminate application.
        if not os.path.exists(self._command_line_arguments.config_path):
            raise FileNotFoundError(
                "--config-file: {} --> File at given config-path does not exist".format(
                    self._command_line_arguments.config_path
                )
            )
        # Fall back to the platform default when no folder was given.
        requested_service_folders = (
            self._command_line_arguments.service_folder
            if self._command_line_arguments.service_folder is not None
            else [self.DEFAULT_SERVICE_FOLDER]
        )
        self._command_line_arguments.service_folder = []
        # First test if Service-Folder(s) exist; if not, terminate the
        # application. If the folder(s) exist, save the absolute path.
        for service_file_folder in requested_service_folders:
            if not os.path.isdir(service_file_folder):
                raise NotADirectoryError("--service-folder: " + service_file_folder + " --> Is not a directory")
            if os.path.isabs(service_file_folder):
                self._command_line_arguments.service_folder.append(service_file_folder)
            else:
                self._command_line_arguments.service_folder.append(os.path.abspath(service_file_folder))
        self._logger.info("Using service folders %s", str(self._command_line_arguments.service_folder))

    def create_communication_queues(self):
        """Create the messaging factory (RabbitMQ if an endpoint was given,
        otherwise inter-process queues) and the metrics queue."""
        communication_type = CommunicationType.InterProcessQueue
        service_endpoint = None
        if self._command_line_arguments.rabbitmq_endpoint is not None:
            communication_type = CommunicationType.RabbitMQ
            service_endpoint = ServiceEndpoint.from_string(self._command_line_arguments.rabbitmq_endpoint)
        self._message_sender_receiver_factory = ScalingMessageSenderReceiverFactory(communication_type)
        self._message_sender_receiver_factory.initialize_connection(service_endpoint=service_endpoint)
        self._scaling_engine_metrics_communication_queue = Queue()

    def parse_config(self):
        """Build and return the dynamite configuration from the parsed
        command-line arguments."""
        return DynamiteINIT(self._command_line_arguments)

    def start_executor(self):
        """Launch the executor as a daemonized child process."""
        scaling_response_sender = self._message_sender_receiver_factory.create_response_sender()
        scaling_request_receiver = self._message_sender_receiver_factory.create_request_receiver()
        self._dynamite_executor = DynamiteEXECUTOR(
            scaling_request_receiver,
            scaling_response_sender,
            self._exit_flag,
            etcd_endpoint=self._command_line_arguments.etcd_endpoint
        )
        self._dynamite_executor.daemon = True
        self._dynamite_executor.start()

    def start_metrics_component(self):
        """Launch the metrics component as a daemonized child process."""
        self._dynamite_metrics = DynamiteMETRICS(
            self._command_line_arguments.etcd_endpoint,
            self._scaling_engine_metrics_communication_queue,
            self._exit_flag
        )
        self._dynamite_metrics.daemon = True
        self._dynamite_metrics.start()

    def start_scaling_engine(self, config):
        """Assemble the scaling engine configuration and start the engine.

        Args:
            config: the DynamiteINIT result returned by parse_config().
        """
        scaling_engine_config = ScalingEngineConfiguration()
        scaling_engine_config.metrics_receiver = MetricsReceiver(self._scaling_engine_metrics_communication_queue)
        scaling_engine_config.services_dictionary = config.dynamite_service_handler.FleetServiceDict
        scaling_engine_config.scaling_policies = config.dynamite_config.ScalingPolicy.get_scaling_policies()
        scaling_engine_config.etcd_connection = config.etcdctl
        scaling_engine_config.executed_task_receiver = self._message_sender_receiver_factory.create_response_receiver()
        scaling_engine_config.scaling_action_sender = self._message_sender_receiver_factory.create_request_sender()
        self._scaling_engine = ScalingEngine(scaling_engine_config, self._exit_flag)
        self._scaling_engine.start()
def test_run(self):
    """End-to-end check of DynamiteMETRICS: feed it a fake etcd client and a
    mocked configuration, run it in a simulated process, and verify the
    metrics messages it puts on the result queue."""
    # Fake etcd pre-loaded with the fixture content of this test class.
    etcdctl_mock = FakeEtcdClient(self.etcd_content)
    config_mock = Mock()
    config_mock.ETCD = Mock()
    config_mock.ETCD.metrics_base_path = "/metrics"
    # Two scaling policies: a per-instance webserver CPU metric and an
    # aggregated loadbalancer response-time metric.
    policies_mock = config_mock.ScalingPolicy
    policies_mock.scale_up = Mock()
    policies_mock.scale_up.service_type = "webserver"
    policies_mock.scale_up.metric = "cpu_user"
    policies_mock.scale_up.metric_aggregated = False
    policies_mock.scale_down = Mock()
    policies_mock.scale_down.service_type = "loadbalancer"
    policies_mock.scale_down.metric = "response_time.time_backend_response.p95"
    policies_mock.scale_down.metric_aggregated = True
    policies_mock.get_scaling_policies = Mock(
        return_value=[policies_mock.scale_up, policies_mock.scale_down])
    exit_flag = Value('i', 0)
    result_queue = Queue()
    metrics_component = DynamiteMETRICS(self.ETCD_ENDPOINT, result_queue, exit_flag)
    # Inject the fake etcd client and mocked configuration before starting.
    metrics_component.etcdctl = etcdctl_mock
    metrics_component.configuration = config_mock
    process_simulator = ProcessSimulator(metrics_component)
    process_simulator.start()
    # Drain exactly the expected number of messages, keyed by instance uuid.
    expected_message_count = 3
    messages_by_instance_uuid = {}
    for message_nr in range(0, expected_message_count):
        result = result_queue.get()
        messages_by_instance_uuid[result.uuid] = result
    # No further messages should have been produced.
    assert result_queue.empty() is True
    expected_message_apache1 = MetricsMessage(
        "webserver", "apache-uuid-1", [
            MetricValue("2015-06-26T09:52:07.680Z", 29),
            MetricValue("2015-06-26T09:52:08.680Z", 30),
            MetricValue("2015-06-26T09:52:09.680Z", 31)
        ], "cpu_user")
    assert messages_by_instance_uuid[
        "apache-uuid-1"] == expected_message_apache1
    expected_message_apache2 = MetricsMessage(
        "webserver", "apache-uuid-2", [
            MetricValue("2015-06-26T09:53:07.680Z", 20),
            MetricValue("2015-06-26T09:53:08.680Z", 21),
            MetricValue("2015-06-26T09:53:09.680Z", 22)
        ], "cpu_user")
    assert messages_by_instance_uuid[
        "apache-uuid-2"] == expected_message_apache2
    # NOTE(review): despite its name, this expected message reuses the
    # webserver service type, the "apache-uuid-2" uuid and the "cpu_user"
    # metric, and is looked up under the empty-string key. That may be what
    # the aggregated-metric path actually emits, or a copy-paste slip —
    # confirm against the fixture content and MetricsMessage equality.
    expected_message_loadbalancer = MetricsMessage(
        "webserver", "apache-uuid-2", [
            MetricValue("2015-06-22T14:54:12.319Z", 699.0),
            MetricValue("2015-06-22T14:54:13.319Z", 699.0)
        ], "cpu_user")
    assert messages_by_instance_uuid[""] == expected_message_loadbalancer
    process_simulator.stop()
    process_simulator.join()
def test_run(self):
    """DynamiteMETRICS, fed by a fake etcd client and a mocked configuration,
    emits exactly one metrics message per monitored instance."""
    fake_etcd = FakeEtcdClient(self.etcd_content)

    configuration = Mock()
    configuration.ETCD = Mock()
    configuration.ETCD.metrics_base_path = "/metrics"

    # One per-instance policy (webserver CPU) and one aggregated policy
    # (loadbalancer response time).
    policies = configuration.ScalingPolicy
    policies.scale_up = Mock()
    policies.scale_up.service_type = "webserver"
    policies.scale_up.metric = "cpu_user"
    policies.scale_up.metric_aggregated = False
    policies.scale_down = Mock()
    policies.scale_down.service_type = "loadbalancer"
    policies.scale_down.metric = "response_time.time_backend_response.p95"
    policies.scale_down.metric_aggregated = True
    policies.get_scaling_policies = Mock(
        return_value=[policies.scale_up, policies.scale_down])

    stop_flag = Value('i', 0)
    output_queue = Queue()
    component = DynamiteMETRICS(self.ETCD_ENDPOINT, output_queue, stop_flag)
    component.etcdctl = fake_etcd
    component.configuration = configuration

    simulator = ProcessSimulator(component)
    simulator.start()

    # Collect the three expected messages, indexed by instance uuid,
    # then make sure nothing extra was produced.
    received = {}
    for _ in range(3):
        message = output_queue.get()
        received[message.uuid] = message
    assert output_queue.empty() is True

    assert received["apache-uuid-1"] == MetricsMessage(
        "webserver",
        "apache-uuid-1",
        [
            MetricValue("2015-06-26T09:52:07.680Z", 29),
            MetricValue("2015-06-26T09:52:08.680Z", 30),
            MetricValue("2015-06-26T09:52:09.680Z", 31),
        ],
        "cpu_user",
    )

    assert received["apache-uuid-2"] == MetricsMessage(
        "webserver",
        "apache-uuid-2",
        [
            MetricValue("2015-06-26T09:53:07.680Z", 20),
            MetricValue("2015-06-26T09:53:08.680Z", 21),
            MetricValue("2015-06-26T09:53:09.680Z", 22),
        ],
        "cpu_user",
    )

    assert received[""] == MetricsMessage(
        "webserver",
        "apache-uuid-2",
        [
            MetricValue("2015-06-22T14:54:12.319Z", 699.0),
            MetricValue("2015-06-22T14:54:13.319Z", 699.0),
        ],
        "cpu_user",
    )

    simulator.stop()
    simulator.join()
class Dynamite:
    """Application entry class: configures dynamite and launches its worker
    processes (executor, metrics collector, scaling engine)."""

    # Runtime environment, taken from $DYNAMITE_ENVIRONMENT (default: development).
    DYNAMITE_ENVIRONMENT = os.getenv('DYNAMITE_ENVIRONMENT', DYNAMITE_ENVIRONMENT_STRUCT.DEVELOPMENT)
    # Both defaults are filled in platform-dependently by init_env().
    DEFAULT_CONFIG_PATH = None
    DEFAULT_SERVICE_FOLDER = None
    DEFAULT_ETCD_ENDPOINT = "127.0.0.1:4001"

    _command_line_arguments = None
    _message_sender_receiver_factory = None
    _scaling_engine_metrics_communication_queue = None
    _exit_flag = None

    def __init__(self):
        self._logger = logging.getLogger("dynamite.Dynamite")
        self._logger.setLevel(logging.INFO)
        self._scaling_engine_metrics_communication_queue = None
        # Shared integer flag; set to 1 to ask the child processes to stop.
        self._exit_flag = Value('i', 0)

    def run(self):
        """Run the full startup sequence, then shut all components down."""
        self._logger.info("Started dynamite")
        self.init_env()
        self.init_arguments()
        config = self.parse_config()
        self.create_communication_queues()
        try:
            self.start_executor()
            self.start_metrics_component()
            self.start_scaling_engine(config)
        finally:
            # Signal the children to stop, then wait for them to finish.
            # NOTE(review): if start_executor() raises, self._dynamite_metrics
            # was never assigned, so the unconditional join below raises
            # AttributeError and masks the original exception — consider
            # guarding these joins with getattr().
            self._exit_flag.value = 1
            self._dynamite_metrics.join()
            self._dynamite_executor.join()

    def init_env(self):
        """Choose platform- and environment-specific default paths for the
        config file and the service-file folder."""
        if platform.system() == 'Windows':
            self._logger.debug("Platform is Windows")
            self._logger.debug("Environment is %s", str(self.DYNAMITE_ENVIRONMENT))
            if self.DYNAMITE_ENVIRONMENT == DYNAMITE_ENVIRONMENT_STRUCT.PRODUCTION:
                self.DEFAULT_CONFIG_PATH = 'C:\\Program Files\\Dynamite\\config.yaml'
                self.DEFAULT_SERVICE_FOLDER = 'C:\\Program Files\\Dynamite\\Service-Files'
            elif self.DYNAMITE_ENVIRONMENT == DYNAMITE_ENVIRONMENT_STRUCT.DEVELOPMENT:
                # Development defaults live next to this file, under tests/.
                self.DEFAULT_CONFIG_PATH = os.path.dirname(os.path.realpath(__file__)) \
                    + '\\tests\\TEST_CONFIG_FOLDER\\config.yaml'
                self.DEFAULT_SERVICE_FOLDER = os.path.dirname(os.path.realpath(__file__)) \
                    + '\\tests\\TEST_CONFIG_FOLDER\\service-files'
        elif platform.system() == 'Linux' or platform.system() == 'Darwin':
            self._logger.debug("Platform is " + platform.system())
            self._logger.debug("Environment is %s", str(self.DYNAMITE_ENVIRONMENT))
            if self.DYNAMITE_ENVIRONMENT == DYNAMITE_ENVIRONMENT_STRUCT.PRODUCTION:
                self.DEFAULT_CONFIG_PATH = '/etc/dynamite/config.yaml'
                self.DEFAULT_SERVICE_FOLDER = '/etc/dynamite/service-files'
            elif self.DYNAMITE_ENVIRONMENT == DYNAMITE_ENVIRONMENT_STRUCT.DEVELOPMENT:
                self.DEFAULT_CONFIG_PATH = os.path.dirname(os.path.realpath(__file__)) \
                    + '/tests/TEST_CONFIG_FOLDER/config.yaml'
                self.DEFAULT_SERVICE_FOLDER = os.path.dirname(os.path.realpath(__file__)) \
                    + '/tests/TEST_CONFIG_FOLDER/service-files'
        # NOTE(review): on an unrecognized platform/environment both defaults
        # stay None and init_arguments() will fail building its help strings.
        self._logger.info("Default config path is %s", self.DEFAULT_CONFIG_PATH)
        self._logger.info("Default service folder path is %s", self.DEFAULT_SERVICE_FOLDER)

    def init_arguments(self):
        """Parse command-line arguments and validate the referenced paths.

        Raises:
            FileNotFoundError: the given config file does not exist.
            NotADirectoryError: a given service folder is not a directory.
        """
        parser = argparse.ArgumentParser()
        parser.add_argument("--config_file", "-c",
                            help="Define Config-File to be used. Default: " + self.DEFAULT_CONFIG_PATH,
                            nargs='?',
                            default=self.DEFAULT_CONFIG_PATH)
        # May be repeated; collected into a list via action='append'.
        parser.add_argument(
            "--service_folder", "-s",
            help=
            "Define Folder(s) in which dynamite searches for service-files (fleet). Default: "
            + self.DEFAULT_SERVICE_FOLDER + ". Can be provided multiple times",
            nargs='?',
            action='append')
        parser.add_argument(
            "--etcd_endpoint", "-e",
            help="Define ETCD Endpoint [IP]:[PORT]. Default: " + self.DEFAULT_ETCD_ENDPOINT,
            nargs='?',
            default=self.DEFAULT_ETCD_ENDPOINT)
        parser.add_argument("--rabbitmq_endpoint", "-r",
                            help="Define Rabbit-MQ Endpoint [IP]:[PORT].",
                            nargs='?',
                            default=None)
        parser.add_argument("--fleet_endpoint", "-f",
                            help="Define Fleet Endpoint [IP]:[PORT].",
                            nargs='?',
                            default="127.0.0.1:49153")
        args = parser.parse_args()
        self._command_line_arguments = CommandLineArguments(args)
        self._command_line_arguments.log_arguments()
        # Test if Config-File exists. If not, terminate application
        if not os.path.exists(self._command_line_arguments.config_path):
            raise FileNotFoundError(
                "--config-file: {} --> File at given config-path does not exist"
                .format(self._command_line_arguments.config_path))
        # Fall back to the platform default when no folder was given.
        ARG_SERVICE_FOLDER_TMP = self._command_line_arguments.service_folder if self._command_line_arguments.service_folder is not None else [
            self.DEFAULT_SERVICE_FOLDER
        ]
        self._command_line_arguments.service_folder = []
        # First test if Service-Folder(s) exist. If not, terminate application
        # If folder(s) exist save the absolute path
        for service_file_folder in ARG_SERVICE_FOLDER_TMP:
            if not os.path.isdir(service_file_folder):
                raise NotADirectoryError("--service-folder: " +
                                         service_file_folder +
                                         " --> Is not a directory")
            if os.path.isabs(service_file_folder):
                self._command_line_arguments.service_folder.append(
                    service_file_folder)
            else:
                self._command_line_arguments.service_folder.append(
                    os.path.abspath(service_file_folder))
        self._logger.info("Using service folders %s",
                          str(self._command_line_arguments.service_folder))

    def create_communication_queues(self):
        """Create the messaging factory (RabbitMQ when an endpoint was given,
        otherwise inter-process queues) and the metrics queue."""
        communication_type = CommunicationType.InterProcessQueue
        service_endpoint = None
        if self._command_line_arguments.rabbitmq_endpoint is not None:
            communication_type = CommunicationType.RabbitMQ
            service_endpoint = ServiceEndpoint.from_string(
                self._command_line_arguments.rabbitmq_endpoint)
        self._message_sender_receiver_factory = ScalingMessageSenderReceiverFactory(
            communication_type)
        self._message_sender_receiver_factory.initialize_connection(
            service_endpoint=service_endpoint)
        self._scaling_engine_metrics_communication_queue = Queue()

    def parse_config(self):
        """Build and return the dynamite configuration from the parsed
        command-line arguments."""
        return DynamiteINIT(self._command_line_arguments)

    def start_executor(self):
        """Launch the executor as a daemonized child process."""
        scaling_response_sender = self._message_sender_receiver_factory.create_response_sender(
        )
        scaling_request_receiver = self._message_sender_receiver_factory.create_request_receiver(
        )
        self._dynamite_executor = DynamiteEXECUTOR(
            scaling_request_receiver,
            scaling_response_sender,
            self._exit_flag,
            etcd_endpoint=self._command_line_arguments.etcd_endpoint)
        # Daemonize so the child cannot keep the parent process alive.
        self._dynamite_executor.daemon = True
        self._dynamite_executor.start()

    def start_metrics_component(self):
        """Launch the metrics component as a daemonized child process."""
        self._dynamite_metrics = DynamiteMETRICS(
            self._command_line_arguments.etcd_endpoint,
            self._scaling_engine_metrics_communication_queue, self._exit_flag)
        self._dynamite_metrics.daemon = True
        self._dynamite_metrics.start()

    def start_scaling_engine(self, config):
        """Assemble the scaling engine configuration and start the engine.

        Args:
            config: the DynamiteINIT result returned by parse_config().
        """
        scaling_engine_config = ScalingEngineConfiguration()
        scaling_engine_config.metrics_receiver = MetricsReceiver(
            self._scaling_engine_metrics_communication_queue)
        scaling_engine_config.services_dictionary = config.dynamite_service_handler.FleetServiceDict
        scaling_engine_config.scaling_policies = config.dynamite_config.ScalingPolicy.get_scaling_policies(
        )
        scaling_engine_config.etcd_connection = config.etcdctl
        scaling_engine_config.executed_task_receiver = self._message_sender_receiver_factory.create_response_receiver(
        )
        scaling_engine_config.scaling_action_sender = self._message_sender_receiver_factory.create_request_sender(
        )
        self._scaling_engine = ScalingEngine(scaling_engine_config,
                                             self._exit_flag)
        self._scaling_engine.start()
def start_metrics_component(self):
    """Launch the metrics component as a daemonized child process.

    The component receives the etcd endpoint from the command line, the
    queue shared with the scaling engine, and the shared exit flag used
    for cooperative shutdown.
    """
    # NOTE(review): presumably DynamiteMETRICS reads metrics from etcd (it is
    # handed the etcd endpoint) — confirm in its implementation.
    self._dynamite_metrics = DynamiteMETRICS(
        self._command_line_arguments.etcd_endpoint,
        self._scaling_engine_metrics_communication_queue, self._exit_flag)
    # Daemonize so this child cannot keep the parent process alive on exit.
    self._dynamite_metrics.daemon = True
    self._dynamite_metrics.start()