/
SparkMonitor.py
101 lines (87 loc) · 3.83 KB
/
SparkMonitor.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
from flask import Flask, render_template,abort
from iceke.worm import Worm
import json
from iceke.util import Util
app = Flask(__name__)
# Web UI / stage-detail endpoints of the two monitored clusters.
# NOTE(review): these are hard-coded internal IPs — presumably the Flint and
# Spark masters of this deployment; confirm before reuse elsewhere.
flint_url = 'http://11.11.0.64:8099/'
flint_stage_url = 'http://11.11.0.64:4041/stages/'
spark_url = 'http://11.11.0.55:8090/'
spark_stage_url = 'http://11.11.0.55:4040/stages/'
@app.route('/')
def show_spark():
    """Serve the main monitoring page."""
    page = render_template('show_spark.html', ctx='ab')
    return page
@app.route('/show_error/')
def show_error():
    """Serve the error page."""
    rendered = render_template('error.html')
    return rendered
@app.route('/get_json/')
def get_json():
    """Crawl the Spark and Flint cluster UIs and return their status as JSON.

    For each cluster the currently running application is preferred; when
    none is running, the most recently finished one is used instead.
    You can change the IPs of the nodes to monitor via the module-level
    *_url constants.

    :return: a JSON string ``{'spark': ..., 'flint': ...}`` where each value
             is either the dict built by ``format_spark_json`` or the string
             ``'none'`` when nothing is available (including on crawl errors).
    """
    try:
        spark_worm = Worm(spark_url, spark_stage_url, True, False, "Spark")
        running_spark = spark_worm.get_running_spark()
        if running_spark is None:
            # fall back to the first finished application
            running_spark = spark_worm.get_finish_spark()
        flint_worm = Worm(flint_url, flint_stage_url, True, False, "Flint")
        running_flint = flint_worm.get_running_spark()
        if running_flint is None:
            running_flint = flint_worm.get_finish_spark()
    except Exception as e:
        # The original returned None here, which makes Flask raise a
        # TypeError; emit a well-formed "nothing available" payload instead.
        print(e)
        return json.dumps({'spark': 'none', 'flint': 'none'})
    final_dict = {
        'spark': format_spark_json(running_spark) if running_spark is not None else 'none',
        'flint': format_spark_json(running_flint) if running_flint is not None else 'none',
    }
    final_json = json.dumps(final_dict)
    print(final_json)
    return final_json
@app.route('/get_cpu_mem/')
def get_cpu_mem():
    """Fetch CPU and memory usage of both clusters' stat endpoints.

    Each endpoint is expected to answer with ``"<cpu>#<mem>"`` text
    (two float fields separated by ``#``).

    :return: a JSON string with keys ``spark_cpu``, ``spark_mem``,
             ``flint_cpu`` and ``flint_mem`` (all floats).
    """
    spark_cpu, spark_mem = _fetch_cpu_mem('http://11.11.0.56:5000/')
    flint_cpu, flint_mem = _fetch_cpu_mem('http://11.11.0.65:5000/')
    usage = {'spark_cpu': spark_cpu, 'spark_mem': spark_mem,
             'flint_cpu': flint_cpu, 'flint_mem': flint_mem}
    print(usage)
    return json.dumps(usage)
def _fetch_cpu_mem(url):
    """Fetch one stat endpoint and return its (cpu, mem) floats."""
    parts = Worm.get_html(url, True, 6).split('#')
    return float(parts[0]), float(parts[1])
def format_spark_json(running_spark):
    """Convert a crawled Spark application object into a JSON-serializable dict.

    :param running_spark: application object produced by ``Worm``, or None
    :return: dict with ``app_name``, ``total_time``, ``status``, ``property``
             and ``stages`` keys, or None when *running_spark* is None.
             ``stages`` lists finished stages (reversed, newest first)
             followed by the currently running stages.
    """
    if running_spark is None:
        return None
    stages = []
    for stage in running_spark.get_finished_stages():
        stages.append({
            'stage_id': stage.get_stage_id(),
            'stage_duration': Util.format_time(stage.get_duration()),
            'submit_time': stage.get_submit_time(),
            # a finished stage is by definition 100% complete
            'tasks_percent': 100.0,
            'gc_time': round(stage.get_gc_time(), 1),
        })
    stages.reverse()
    for stage in running_spark.get_running_stages():
        stages.append({
            'stage_id': stage.get_stage_id(),
            'stage_duration': Util.format_time(stage.get_duration()),
            'submit_time': stage.get_submit_time(),
            'tasks_percent': Util.format_tasks_percent(stage.get_tasks_percent()),
            'gc_time': round(stage.get_gc_time(), 1),
        })
    return {
        'app_name': running_spark.get_app_name(),
        'total_time': Util.format_time(running_spark.get_total_time()),
        'status': running_spark.get_status(),
        'property': running_spark.get_property(),
        'stages': stages,
    }
if __name__ == '__main__':
    # Start the Flask development server (defaults: 127.0.0.1:5000).
    app.run()