def main():
    """Agent entry point: parse CLI args, load config, start rsyncd and the web app.

    Mutates the module globals confdir, DB_HOME, cluster_name and zk_conf
    before launching the bottle server.
    """
    global confdir, DB_HOME, cluster_name, zk_conf
    import argparse

    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('--confdir')
    args = arg_parser.parse_args()

    logging.basicConfig(
        level=logging.INFO,
        format='%(asctime)s %(filename)s:%(lineno)d %(levelname)s %(message)s',
        datefmt='%Y%m%dT%H:%M:%S')

    if args.confdir:
        confdir = args.confdir

    cfg = get_server_conf(confdir)
    DB_HOME = cfg['hstore']['local']['home']
    mc_port = cfg['server']['port']
    zk_conf = cfg['server']['zkpath'].split('/')[-1]
    cluster_name = cfg['cluster_name']
    logging.info("agent start, home %s", DB_HOME)

    local_node = Node("localhost:%d" % mc_port)
    # not restart when reload: bottle's reloader re-execs the script with
    # BOTTLE_CHILD set, so only the parent process starts rsyncd.
    if not os.environ.get('BOTTLE_CHILD'):
        logging.info("start rsyncd ok: %s",
                     start_rsyncd(local_node.rsync_client().port))
    run(host="0.0.0.0", port=local_node.agent_client().port,
        reloader=False, debug=True)
def __init__(self, host_alias):
    """Resolve `host_alias` to its canonical host and wire up client handles.

    Builds the node/web clients on port 7905 plus a libmc client against
    the proxy's memcached server port.
    """
    self.host_alias = host_alias
    # gethostbyname_ex returns (canonical_name, aliases, ip_list); keep the
    # canonical name.
    self.host = socket.gethostbyname_ex(self.host_alias)[0]
    self.addr = self.host + ":7905"
    self.node = Node(self.addr)
    self.web = self.node.web_client()
    self.server_addr = '%s:%s' % (self.host, PROXY_SERVER_PORT)
    self.web_addr = '%s:%s' % (self.host, PROXY_WEB_PORT)
    self.server = libmc.Client([self.server_addr])
def force_reload_route(cluster):
    """Push the newest route version to every proxy, then every server.

    Proxies are reloaded strictly one by one: any verification failure
    raises and aborts the rollout.  Servers are reloaded best-effort —
    only ResponsError aborts; other exceptions are logged and the rollout
    continues, but each server is still verified afterwards.
    """
    zoo = ZK(cluster)
    # Kept although unused: Route.from_zk may validate the route exists in
    # zk — TODO confirm whether this side effect is required.
    router = Route.from_zk(zoo)
    servers = [s + ':7900' for s in get_servers(zoo)]
    proxies = set(zoo.proxies_get())
    new_ver = zoo.route_verison_get_newest()  # NOTE: "verison" is the real API name
    logger.info('route newest version: %s', new_ver)

    def check_new_version(webc):
        # Verify a node actually picked up the new version.
        curr_ver = webc.get_route_version()
        if curr_ver != new_ver:
            # BUG FIX: the original passed %-style args as extra Exception()
            # arguments, so the message was never interpolated; format it.
            raise Exception("%s route ver %d, not %d"
                            % (webc, curr_ver, new_ver))

    for s in proxies:
        host = socket.gethostbyname_ex(Node(s).host)[0]
        logger.info("begin reload proxy: %s, host: %s", s, host)
        c = Node(s).web_client()
        c.reload_route(new_ver)
        logger.info("reload proxy %s, host: %s", s, host)
        time.sleep(RELOAD_INTERVAL_PROXY)
        check_new_version(c)

    # sorted() already returns a new list; the extra list() copy was redundant.
    for i, s in enumerate(sorted(servers)):
        logger.info("reload other server %d/%d: %s", i, len(servers), s)
        c = Node(s).web_client()
        try:
            c.reload_route(new_ver)
        except ResponsError:
            raise
        except Exception as e:
            logger.exception("%s", e)
        time.sleep(RELOAD_INTERVAL_OTHER)
        check_new_version(c)
def __init__(self, host):
    """Collect status for one storage server on port 7900.

    Any failure while gathering stats is captured in self.err rather than
    propagated, so a dead server still yields a (partial) object.
    """
    self.host = host
    self.err = None
    server_addr = self.host + ":7900"
    web = Node(server_addr).web_client()
    self.mc = DBClient(server_addr)
    try:
        self.config = get_config(host)
        self.route_version = web.get_route_version()
        self.numbucket = self.config['NumBucket']
        # Indices of the buckets this server owns (flag == 1 in the bitmap).
        self.buckets_id = [idx
                           for idx, flag in enumerate(self.config['Buckets'])
                           if flag == 1]
        self.du = get_du(self.host)
        self.buffer_stat = get_buffer_stat(self.host)
        self.lasterr_ts = get_lasterr_ts(self.host)
        self.stats = self.mc.stats()
    except Exception as e:
        self.err = e
class Proxy(object):
    """Read-only view of one proxy process: its web endpoints and mc stats."""

    def __init__(self, host_alias):
        self.host_alias = host_alias
        # Canonical hostname from (name, aliases, ips).
        self.host = socket.gethostbyname_ex(self.host_alias)[0]
        self.addr = self.host + ":7905"
        self.node = Node(self.addr)
        self.web = self.node.web_client()
        self.server_addr = '%s:%s' % (self.host, PROXY_SERVER_PORT)
        self.web_addr = '%s:%s' % (self.host, PROXY_WEB_PORT)
        self.server = libmc.Client([self.server_addr])

    def get_info(self, name):
        """Fetch a JSON document from the proxy's web port; {} on any failure."""
        url = 'http://%s/%s' % (self.web_addr, name)
        try:
            return json.loads(get_url_content(url))
        except Exception:
            return {}

    def get_score(self):
        return self.get_info('score/json')

    def get_stats(self):
        """Return the mc stats dict enriched with derived, humanized fields."""
        # stats() maps address -> stats dict; only one address was queried.
        # (Python 2: dict.values() is a list, so [0] indexing is valid.)
        rs = self.server.stats().values()[0]
        route_version = self.web.get_route_version()
        try:
            rs['web_addr'] = self.web_addr
            rs['host'] = self.host
            rs['route_version'] = route_version
            rs['host_alias'] = self.host_alias
            rs['rusage_maxrss'] = big_num(rs['rusage_maxrss'] * 1000, 1, 2)
            rs['start_time'] = get_start_time(rs['uptime'])
            rs['get'] = big_num(rs['cmd_get'], 1, 2)
            rs['set'] = big_num(rs['cmd_set'], 1, 2)
            rs['delete'] = big_num(rs['cmd_delete'], 1, 2)
            # NOTE(review): "read"/"write" are swapped relative to the raw
            # counters — presumably client-perspective (client reads what the
            # server writes); confirm before "fixing".
            rs['read'] = big_num(rs['bytes_written'], 1, 2)
            rs['write'] = big_num(rs['bytes_read'], 1, 2)
        except KeyError:
            # Partial stats from an old proxy version: keep what we have.
            pass
        return rs

    def get_arc(self):
        # NOTE(review): the leading '/' produces a double slash in the URL
        # built by get_info ('http://host//api/partition') — verify intended.
        return self.get_info('/api/partition')
def rsync_start(bucket):
    """Bottle handler: start rsync-ing `bucket` from `src` onto local `disk`.

    Query params: disk (target disk), size (source du, bytes), src (node addr).
    Refuses to start while a transfer for the same bucket is running.
    """
    disk = request.query['disk']
    size = int(request.query['size'])
    src = Node(request.query['src'])

    st = RsyncState.buckets.get(bucket)
    if st and st.running():
        return st.summary(ERR_RSYNC_WORKING)

    # If the existing transfer is not running, it is simply overwritten.
    st = RsyncState(bucket, disk)
    st.src = src
    st.src_du = size
    if st.dstate == 2:
        return st.summary("not_empty")
    elif st.check_and_clear_dir():
        st.start_rsync()
        return st.summary()
    # NOTE(review): falls through to an implicit None (empty response body)
    # when the target dir cannot be cleared — confirm callers handle this.
def check_route(zk, servers, proxies, new_ver): old_ver = zk.route_version_get() newest_ver = zk.route_verison_get_newest() state = "versions: (old %d new %d newest %d)" % (old_ver, new_ver, newest_ver) if newest_ver not in (new_ver, old_ver): raise RouteError("bad zk %s" % state) for (group, cname) in ((servers, 'servers'), (proxies, 'proxies')): news = [] others = [] for addr in group: ver = Node(addr).web_client().get_route_version() if ver != old_ver: if ver == new_ver: news.append(addr) else: others.append((addr, ver)) if len(others) > 0: raise RouteError("%s have wront vers %s: %s" %\ (cname, state, others))