def nodes_get_masters_info(request):
    try:
        masters, myself = utils.masters_detail(
            request.args['host'], int(request.args['port']))
        return base.json_result({
            'masters': masters,
            'myself': {
                'role': myself.role_in_cluster,
                'slots': len(myself.assigned_slots),
            },
        })
    except SocketError:
        return base.json_result({
            'masters': [],
            'myself': {'role': 'master', 'slots': 0},
        })

def create_eru_proxy(request):
    container_info = None
    try:
        cluster = models.cluster.get_by_id(int(request.form['cluster_id']))
        if cluster is None or len(cluster.nodes) == 0:
            raise ValueError('no such cluster')
        port = int(request.form.get('port', 8889))
        if not 8000 <= port <= 9999:
            raise ValueError('invalid port')
        container_info = deploy_proxy(
            request.form['pod'], int(request.form['threads']),
            request.form.get('read_slave') == 'rs', request.form['netmode'],
            host=request.form.get('host'), port=port)
        models.proxy.create_eru_instance(
            container_info['address'], port, cluster.id,
            container_info['container_id'])
        _set_proxy_remote(container_info['address'], port,
                          cluster.nodes[0].host, cluster.nodes[0].port)
        return base.json_result(container_info)
    except IntegrityError:
        if container_info is not None:
            rm_containers([container_info['container_id']])
        raise ValueError('exists')
    except BaseException as exc:
        logging.exception(exc)
        raise

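# The view above calls a _set_proxy_remote helper that is not shown in this
# excerpt. A minimal sketch follows, assuming it mirrors the inline
# set_remotes logic used by the threaded variant of create_eru_proxy later
# in this section: wait briefly for the proxy container to start listening,
# then send it a SETREMOTES command pointing at one node of the cluster.
def _set_proxy_remote(proxy_host, proxy_port, redis_host, redis_port):
    time.sleep(1)
    t = Talker(proxy_host, proxy_port)
    try:
        t.talk('setremotes', redis_host, redis_port)
    finally:
        t.close()
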
def fetch_stats(request):
    host = request.args['host']
    if not PAT_HOST.match(host):
        raise ValueError('Invalid hostname')
    port = int(request.args['port'])
    limit = min(int(request.args['limit']), 1000)

    result = {}
    for field in RES_FIELDS:
        q = stats.db.client.query(
            '''select mean(%s) from "%s:%d" group by time(2m) limit %d''' % (
                field, host, port, limit))
        result[field] = q[0]['points']
    for field in INT_FIELDS:
        q = stats.db.client.query(
            '''select max(%s) from "%s:%d" group by time(2m) limit %d''' % (
                field, host, port, limit))
        result[field] = q[0]['points']
    for field in DERV_FIELDS:
        q = stats.db.client.query(
            'select derivative(%s) from "%s:%d" group by time(2m) limit %d' % (
                field, host, port, limit))
        result[field] = q[0]['points']
    return base.json_result(result)

def sequence(request):
    m = []
    now = int(time.time())
    for c in proxy.list_ip():
        node = '%s:%d' % (c[0], c[1])
        result = stats.client.query_request(node, now)
        if result:
            m.append(result)
    return base.json_result(m)

def list_clusters(request):
    r = []
    for c in models.cluster.list_all():
        if len(c.nodes) == 0:
            continue
        r.append({
            'id': c.id,
            'descr': c.description,
            'nodes': len(c.nodes),
        })
    return base.json_result(r)

def node_exec_command(request):
    t = Talker(request.form['host'], int(request.form['port']))
    try:
        r = t.talk(*json.loads(request.form['cmd']))
    except ValueError as e:
        r = None if e.message == 'No reply' else ('-ERROR: ' + e.message)
    except ReplyError as e:
        r = '-' + e.message
    finally:
        t.close()
    return base.json_result(r)

def _simple_cmd(host, port, *command):
    status = 200
    t = Talker(host, port)
    try:
        r = t.talk(*command)
    except ReplyError as e:
        r = '-' + e.message
        status = 400
    finally:
        t.close()
    return base.json_result(r, status)

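# Hypothetical usage sketch, not taken from the project itself: a view
# handler can delegate a one-off Redis command to _simple_cmd and return
# its JSON result directly. The route name and command are illustrative.
def node_get_maxmemory(request):
    return _simple_cmd(request.form['host'], int(request.form['port']),
                       'config', 'get', 'maxmemory')
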
def fetch_stats(request):
    host, port, limit, interval, span = _parse_args(request.args)
    now = int(time.time())
    node = '%s:%d' % (host, port)
    result = {}
    for field in REDIS_AVG_FIELDS:
        result[field] = stats.client.query(
            node, field, 'AVERAGE', span, now, interval)
    for field in REDIS_MAX_FIELDS:
        result[field] = stats.client.query(
            node, field, 'MAX', span, now, interval)
    return base.json_result(result)

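# _parse_args is referenced by several fetch_stats variants but not shown
# in this excerpt. A minimal sketch, assuming it validates the host against
# PAT_HOST, caps the requested sample count, and derives the query span
# from it the same way the inline variant later in this section does; the
# interval default and the exact units of span are assumptions.
def _parse_args(args):
    host = args['host']
    if not PAT_HOST.match(host):
        raise ValueError('Invalid hostname')
    port = int(args['port'])
    limit = min(int(args['limit']), 720)
    interval = int(args.get('interval', 2))
    span = timedelta(minutes=limit * interval)
    return host, port, limit, interval, span
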
def fetch_stats(request):
    host, port, limit, interval, span = _parse_args(request.args)
    now = datetime.utcnow()
    node = '%s:%d' % (host, port)
    result = {}
    for field in RES_FIELDS:
        result[field] = get_stats_by_node(node, field, 'mean', span, now,
                                          interval)
    for field in INT_FIELDS:
        result[field] = get_stats_by_node(node, field, 'max', span, now,
                                          interval)
    return base.json_result(result)

def cluster_get_task_steps(request):
    t = models.task.get_task_by_id(int(request.args['id']))
    if t is None:
        return base.not_found()
    return base.json_result([{
        'id': step.id,
        'command': step.command,
        'args': step.args,
        'status': 'completed' if step.completed else (
            'running' if step.running else 'pending'),
        'start_time': template.f_strftime(step.start_time),
        'completion': template.f_strftime(step.completion),
        'exec_error': step.exec_error,
    } for step in t.all_steps])

def cluster_get_masters_info(request):
    c = models.cluster.get_by_id(request.args['id'])
    if c is None or len(c.nodes) == 0:
        return base.not_found()
    node = c.nodes[0]
    masters = redistrib.command.list_masters(node.host, node.port)[0]
    node_details = {(n['host'], n['port']): n
                    for n in file_ipc.read()['nodes']}
    result = []
    for n in masters:
        r = {'host': n.host, 'port': n.port}
        if (n.host, n.port) in node_details:
            r['slots_count'] = len(node_details[(n.host, n.port)]['slots'])
        result.append(r)
    return base.json_result(result)

def fetch_stats(request):
    host = request.args['host']
    if not PAT_HOST.match(host):
        raise ValueError('Invalid hostname')
    port = int(request.args['port'])
    limit = min(int(request.args['limit']), 720)
    span = timedelta(minutes=limit * 2)
    now = datetime.utcnow()
    node = '%s:%d' % (host, port)
    result = {}
    for field in RES_FIELDS:
        result[field] = get_stats_by_node(node, field, 'mean', span, now)
    for field in INT_FIELDS:
        result[field] = get_stats_by_node(node, field, 'max', span, now)
    return base.json_result(result)

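# get_stats_by_node is referenced above but not shown. A minimal sketch,
# assuming it wraps the same stats.db.client used by the raw-query variants
# of fetch_stats, aggregating one field per series named "host:port" over
# the requested span; the exact query syntax and points layout are
# assumptions based on those variants.
def get_stats_by_node(node, field, aggf, span, now, interval=2):
    q = stats.db.client.query(
        "select %s(%s) from \"%s\" where time > '%s' group by time(%dm)" % (
            aggf, field, node,
            (now - span).strftime('%Y-%m-%d %H:%M:%S'), interval))
    return q[0]['points'] if q else []
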
def create_eru_node(request):
    container_info = None
    try:
        port = int(request.form.get('port', 6379))
        if not 6000 <= port <= 7999:
            raise ValueError('invalid port')
        container_info = deploy_node(
            request.form['pod'], request.form['aof'] == 'y',
            request.form['netmode'], request.form['cluster'] == 'y',
            host=request.form.get('host'), port=port)
        models.node.create_eru_instance(container_info['address'], port,
                                        container_info['container_id'])
        return base.json_result(container_info)
    except IntegrityError:
        if container_info is not None:
            rm_containers([container_info['container_id']])
        raise ValueError('exists')
    except BaseException as exc:
        logging.exception(exc)
        raise

def fetch_stats(request):
    host = request.args['host']
    if not PAT_HOST.match(host):
        raise ValueError('Invalid hostname')
    port = int(request.args['port'])
    limit = min(int(request.args['limit']), 1000)

    result = {}
    for field in RES_FIELDS:
        q = stats.db.client.query(
            '''select mean(%s) from "%s:%d" group by time(2m) limit %d''' % (
                field, host, port, limit))
        result[field] = q[0]['points']
    for field in INT_FIELDS:
        q = stats.db.client.query(
            '''select max(%s) from "%s:%d" group by time(2m) limit %d''' % (
                field, host, port, limit))
        result[field] = q[0]['points']
    return base.json_result(result)

def create_eru_proxy(request):
    def set_remotes(proxy_addr, proxy_port, redis_host, redis_port):
        time.sleep(1)
        t = Talker(proxy_addr, proxy_port)
        try:
            t.talk('setremotes', redis_host, redis_port)
        finally:
            t.close()

    container_info = None
    try:
        cluster = models.cluster.get_by_id(int(request.form['cluster_id']))
        if cluster is None or len(cluster.nodes) == 0:
            raise ValueError('no such cluster')
        port = int(request.form.get('port', 8889))
        if not 8000 <= port <= 9999:
            raise ValueError('invalid port')
        container_info = deploy_proxy(
            request.form['pod'], int(request.form['threads']),
            request.form.get('read_slave') == 'rs', request.form['netmode'],
            host=request.form.get('host'), port=port)
        models.proxy.create_eru_instance(
            container_info['address'], port, cluster.id,
            container_info['container_id'])
        threading.Thread(target=set_remotes, args=(
            container_info['address'], port, cluster.nodes[0].host,
            cluster.nodes[0].port)).start()
        return base.json_result(container_info)
    except IntegrityError:
        if container_info is not None:
            rm_containers([container_info['container_id']])
        raise ValueError('exists')
    except BaseException as exc:
        logging.exception(exc)
        raise

    finally:
        t.close()


@base.get_async('/cluster/autodiscover')
def cluster_auto_discover(request):
    host = request.args['host']
    port = int(request.args['port'])
    try:
        nodes = redistrib.command.list_nodes(host, port, host)[0]
    except StandardError as e:
        logging.exception(e)
        raise ValueError(e)
    if len(nodes) <= 1 and len(nodes[0].assigned_slots) == 0:
        return base.json_result({'cluster_discovered': False})
    return base.json_result({
        'cluster_discovered': True,
        'nodes': [{
            'host': n.host,
            'port': n.port,
            'role': n.role_in_cluster,
            'known': nm.get_by_host_port(n.host, n.port) is not None,
        } for n in nodes],
    })


@base.post_async('/cluster/autojoin')
def cluster_auto_join(request):
    host = request.form['host']

    host = request.args['host']
    port = int(request.args['port'])
    try:
        nodes = redistrib.command.list_nodes(host, port, host)[0]
    except StandardError as e:
        logging.exception(e)
        raise ValueError(e)
    unknown_nodes = []
    for n in nodes:
        if nm.get_by_host_port(n.host, n.port) is None:
            unknown_nodes.append(n)
    return base.json_result([{
        'host': n.host,
        'port': n.port,
        'role': n.role_in_cluster,
    } for n in unknown_nodes])


@base.post_async('/cluster/autojoin')
def cluster_auto_join(request):
    host = request.form['host']
    port = int(request.form['port'])
    try:
        nodes = redistrib.command.list_nodes(host, port, host)[0]
    except StandardError as e:
        logging.exception(e)
        raise ValueError(e)
    cluster_ids = set()

def cluster_get_masters_info(request):
    c = models.cluster.get_by_id(request.args['id'])
    if c is None or len(c.nodes) == 0:
        return base.not_found()
    node = c.nodes[0]
    return base.json_result(utils.masters_detail(node.host, node.port)[0])

def eru_list_pod_hosts(request, pod):
    return base.json_result([{
        'name': r['name'],
        'addr': r['addr'],
    } for r in eru_client.list_pod_hosts(pod) if r['is_alive']])