def __init__(self, app_id, info_plugin, collect_period=2, retries=10):
    Plugin.__init__(self, app_id, info_plugin,
                    collect_period, retries=retries)
    kubernetes.config.load_kube_config()
    # Monasca publishing is optional, driven by the 'graphic_metrics' flag.
    self.enable_monasca = info_plugin['graphic_metrics']
    if self.enable_monasca:
        self.monasca = MonascaConnector()
    self.submission_url = info_plugin['count_jobs_url']
    self.expected_time = int(info_plugin['expected_time'])
    self.number_of_jobs = int(info_plugin['number_of_jobs'])
    self.submission_time = datetime.strptime(
        info_plugin['submission_time'], '%Y-%m-%dT%H:%M:%S.%fGMT')
    self.dimensions = {'application_id': self.app_id,
                       'service': 'kubejobs'}
    # Redis holds the per-application metrics queue.
    self.rds = redis.StrictRedis(host=info_plugin['redis_ip'],
                                 port=info_plugin['redis_port'])
    self.metric_queue = "%s:metrics" % self.app_id
    self.current_job_id = 0
    self.b_v1 = kubernetes.client.BatchV1Api()
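# Hypothetical 'info_plugin' payload for the kubejobs constructor above;
# only the keys come from the code -- every value is invented for
# illustration.
info_plugin = {
    'graphic_metrics': False,      # skips the MonascaConnector branch
    'count_jobs_url': 'http://10.0.0.4:5000/count',
    'expected_time': '500',        # seconds; cast to int by the constructor
    'number_of_jobs': '1000',      # cast to int by the constructor
    'submission_time': '2019-01-01T00:00:00.000000GMT',
    'redis_ip': '10.0.0.5',
    'redis_port': 6379,
}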
def __init__(self, app_id, info_plugin, retries=60):
    Plugin.__init__(self, app_id, info_plugin,
                    collect_period=5, retries=retries)
    self.monasca = MonascaConnector()
    # Note: the config key is spelled 'spark_submisson_url' (sic) in the source.
    self.submission_url = info_plugin['spark_submisson_url']
    self.expected_time = info_plugin['expected_time']
    self.remaining_time = int(self.expected_time)
    self.job_expected_time = int(self.expected_time)
    self.number_of_jobs = int(info_plugin['number_of_jobs'])
    self.current_job_id = 0
    self.dimensions = {'application_id': self.app_id,
                       'service': 'spark-sahara'}
    # SSH into the Mesos master to discover the running Spark application.
    self.conn = paramiko.SSHClient()
    self.conn.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    self.conn.connect(hostname=api.mesos_cluster_addr,
                      username=api.mesos_username,
                      password=api.mesos_password)
    self.spark_id = self._discover_id_from_spark()
def __init__(self, app_id, info_plugin, collect_period=2,
             retries=10, last_replicas=None):
    Plugin.__init__(self, app_id, info_plugin,
                    collect_period, retries=retries)
    self.validate(info_plugin)
    self.LOG = Log(LOG_NAME, LOG_FILE)
    self.enable_detailed_report = info_plugin['enable_detailed_report']
    self.expected_time = int(info_plugin['expected_time'])
    self.number_of_jobs = int(info_plugin['number_of_jobs'])
    self.submission_time = self.get_submission_time(info_plugin)
    self.dimensions = self.get_dimensions()
    self.rds = self.setup_redis(info_plugin)
    self.metric_queue = "%s:metrics" % self.app_id
    self.current_job_id = 0
    self.job_report = JobReport(info_plugin)
    self.report_flag = True
    self.enable_generate_job_report = False
    # State carried between collection cycles for progress/error deltas.
    self.last_replicas = last_replicas
    self.last_error = 0.0
    self.last_progress = 0.0
    kubernetes.config.load_kube_config(api.k8s_manifest)
    self.b_v1 = kubernetes.client.BatchV1Api()
    self.datasource = self.setup_datasource(info_plugin)
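# Sketch of the setup_redis() helper used above, assuming it wraps the
# same StrictRedis construction that the other constructors inline; this
# body is an assumption, not taken from the source.
import redis

def setup_redis(self, info_plugin):
    return redis.StrictRedis(host=info_plugin['redis_ip'],
                             port=info_plugin['redis_port'])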
def __init__(self, app_id, info_plugin, collect_period=2,
             retries=60, monasca_conn="monasca"):
    Plugin.__init__(self, app_id, info_plugin,
                    collect_period, retries=retries)
    # Allow a pre-built connector to be injected (e.g. a mock in tests);
    # the default sentinel string builds a real MonascaConnector.
    if monasca_conn == "monasca":
        self.monasca = MonascaConnector()
    else:
        self.monasca = monasca_conn
    self.submission_url = info_plugin['spark_submisson_url']
    self.expected_time = info_plugin['expected_time']
    self.number_of_jobs = int(info_plugin['number_of_jobs'])
    self.job_expected_time = (float(self.expected_time) /
                              float(self.number_of_jobs))
    self.remaining_time = float(self.expected_time)
    self.current_job_id = 0
    self.app_id = app_id
    self.dimensions = {'application_id': self.app_id,
                       'service': 'spark-sahara'}
    self.job_ratio = 1.0 / self.number_of_jobs
    self.first_submission_time = None
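# Worked example of the derived timing fields above, using hypothetical
# inputs (expected_time='100', number_of_jobs=4):
expected_time = '100'
number_of_jobs = 4
job_expected_time = float(expected_time) / float(number_of_jobs)  # 25.0 s per job
remaining_time = float(expected_time)                             # 100.0 s
job_ratio = 1.0 / number_of_jobs                                  # each job is 25% of total progress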
def __init__(self, app_id, info_plugin, collect_period=2, retries=10):
    Plugin.__init__(self, app_id, info_plugin,
                    collect_period, retries=retries)
    self.enable_visualizer = info_plugin['enable_visualizer']
    self.submission_url = info_plugin['count_jobs_url']
    self.expected_time = int(info_plugin['expected_time'])
    self.number_of_jobs = int(info_plugin['number_of_jobs'])
    self.submission_time = datetime.strptime(
        info_plugin['submission_time'], '%Y-%m-%dT%H:%M:%S.%fGMT')
    self.dimensions = {'application_id': self.app_id,
                       'service': 'kubejobs'}
    self.rds = redis.StrictRedis(host=info_plugin['redis_ip'],
                                 port=info_plugin['redis_port'])
    self.metric_queue = "%s:metrics" % self.app_id
    self.current_job_id = 0
    kubernetes.config.load_kube_config()
    self.b_v1 = kubernetes.client.BatchV1Api()
    if self.enable_visualizer:
        # Pick the visualization backend from the config.
        datasource_type = info_plugin['datasource_type']
        if datasource_type == "monasca":
            self.datasource = MonascaConnector()
        elif datasource_type == "influxdb":
            influx_url = info_plugin['database_data']['url']
            influx_port = info_plugin['database_data']['port']
            database_name = info_plugin['database_data']['name']
            self.datasource = InfluxConnector(influx_url, influx_port,
                                              database_name)
        else:
            print("Unknown datasource type...!")
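# Hypothetical fragment of 'info_plugin' that selects the InfluxDB branch
# above; keys mirror the constructor's lookups, values are invented.
info_plugin = {
    'enable_visualizer': True,
    'datasource_type': 'influxdb',
    'database_data': {
        'url': '10.0.0.6',
        'port': 8086,
        'name': 'monitoring',
    },
    # ...plus the kubejobs keys: 'count_jobs_url', 'expected_time',
    # 'number_of_jobs', 'submission_time', 'redis_ip', 'redis_port'.
}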
def __init__(self, app_id, info_plugin, keypair, retries=60):
    Plugin.__init__(self, app_id, info_plugin,
                    collect_period=5, retries=retries)
    self.app_id = app_id
    self.host_ip = info_plugin['host_ip']
    self.keypair_path = keypair
    self.host_username = info_plugin['host_username']
    self.log_path = info_plugin['log_path']
    self.dimensions = {'app_id': self.app_id, 'host': self.host_ip}
    self.last_checked = ''
    self.monasca = MonascaConnector()
def test_init_empty_plugin(self):
    plugin_1 = Plugin(3)
    plugin_2 = Plugin(3)
    self.assertNotEqual(plugin_1, plugin_2)
    self.assertFalse(plugin_1.running)
    self.assertEqual(plugin_1.dimensions, {})
    self.assertEqual(plugin_1.collect_period, 3)
    self.assertEqual(plugin_1.attempts, 30)
    self.assertIsNone(plugin_1.app_id)
    self.assertFalse(plugin_2.running)
    self.assertEqual(plugin_2.dimensions, {})
    self.assertEqual(plugin_2.collect_period, 3)
    self.assertEqual(plugin_2.attempts, 30)
    self.assertIsNone(plugin_2.app_id)
def __init__(self, app_id, info_plugin, keypair, retries=60):
    Plugin.__init__(self, app_id, info_plugin,
                    collect_period=5, retries=retries)
    self.app_id = app_id
    self.host_ip = info_plugin['host_ip']
    self.expected_time = info_plugin['expected_time']
    self.log_path = info_plugin['log_path']
    self.keypair_path = keypair
    self.host_username = '******'  # username redacted in the source
    self.dimensions = {"application_id": self.app_id,
                       "host": self.host_ip}
    self.last_checked = ''
    self.start_time = time.time()
    self.monasca = MonascaConnector()
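# Sketch of how a collector might use the fields stored above to read the
# remote log. The connect pattern mirrors the paramiko usage in the Spark
# plugin earlier, but key-based auth and this helper are assumptions, not
# taken from the source.
import paramiko

def read_remote_log(host_ip, host_username, keypair_path, log_path):
    conn = paramiko.SSHClient()
    conn.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    conn.connect(hostname=host_ip, username=host_username,
                 key_filename=keypair_path)
    _, stdout, _ = conn.exec_command('cat %s' % log_path)
    try:
        return stdout.read().decode()
    finally:
        conn.close()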
def test_stop_plugin(self):
    plugin = Plugin(3)
    plugin.running = True
    plugin.stop()
    self.assertFalse(plugin.running)