def start(self, nodes, bucket, pnames, name, frequency, client_id='',
          collect_server_stats=True):
    """Start the background stat-collection threads for a test run.

    Records task metadata in ``self._task``, then (when
    *collect_server_stats* is true) spawns one daemon-style worker per
    stat family.  Build/machine stats are captured once, synchronously,
    at the end.
    """
    self._task = {"state": "running", "threads": [], "name": name,
                  "time": time.time(), "ops": [], "totalops": [],
                  "ops-temp": [], "latency": {}, "data_size_stats": []}
    rest = RestConnection(nodes[0])
    info = rest.get_nodes_self()
    self.data_path = info.storage[0].get_data_path()
    self.client_id = str(client_id)
    if collect_server_stats:
        mbstats_thread = Thread(target=self.membase_stats,
                                args=(nodes, bucket, frequency, self._verbosity))
        mbstats_thread.start()
        sysstats_thread = Thread(target=self.system_stats,
                                 args=(nodes, pnames, frequency, self._verbosity))
        sysstats_thread.start()
        diskstats_thread = Thread(target=self.disk_stats,
                                  args=(nodes, frequency, self._verbosity))
        diskstats_thread.start()
        # ns_server stats are polled from a single node at a fixed 10s cadence
        ns_server_stats_thread = Thread(target=self.ns_server_stats,
                                        args=([nodes[0]], bucket, 10, self._verbosity))
        ns_server_stats_thread.start()
        # reuse the RestConnection opened above instead of building a
        # second, identical connection to the same node
        bucket_size_thead = Thread(target=self.get_bucket_size,
                                   args=(bucket, rest, frequency))
        bucket_size_thead.start()
        # NOTE(review): diskstats_thread is started but never added to
        # the tracked thread list — confirm whether that is intentional
        self._task["threads"] = [sysstats_thread, ns_server_stats_thread,
                                 bucket_size_thead, mbstats_thread]
    # Getting build/machine stats from only one node in the cluster
    self.build_stats(nodes)
    self.machine_stats(nodes)
def __init__(self, host=None, port=None, username=None, password=None):
    """Initialize memcache and couchbase clients.

    Any argument left as None falls back to the value configured in
    settings.COUCHBASE.
    """
    cfg = settings.COUCHBASE
    server = {
        'ip': host if host else cfg['HOST'],
        'port': port if port else cfg['PORT'],
        'username': username if username else cfg['USER'],
        'password': password if password else cfg['PASSWORD'],
    }
    self.rest_client = RestConnection(server)
    # memcached always listens on the standard moxi port 11211
    self.mc_client = memcache.Client(['%s:11211' % server['ip']], debug=0)
def proxy_client(server, bucket):
    """Return a MemcachedClient talking to the moxi proxy for *bucket* on *server*.

    Looks the node up in the bucket's node list, connects to its moxi
    port, records the vbucket count, and performs SASL auth when the
    bucket requires it.  Raises Exception when *server* is not a member
    of the bucket.
    """
    # for this bucket on this node what is the proxy ?
    rest = RestConnection(server)
    bucket_info = rest.get_bucket(bucket)
    nodes = bucket_info.nodes
    for node in nodes:
        if node.ip == server.ip and int(node.port) == int(server.port):
            client = MemcachedClient(server.ip, node.moxi)
            # reuse the REST connection created above instead of opening
            # a second, identical one
            vBuckets = rest.get_vbuckets(bucket)
            client.vbucket_count = len(vBuckets)
            if bucket_info.authType == "sasl":
                client.sasl_auth_plain(bucket_info.name.encode('ascii'),
                                       bucket_info.saslPassword.encode('ascii'))
            return client
    raise Exception("unable to find {0} in get_nodes()".format(server.ip))
def __init__(self, url, bucket, password="", verbose=False):
    """Initialize a vbucket-aware memcached client.

    url      -- cluster REST endpoint, e.g. http://host:port/pools/default
    bucket   -- bucket name; also used as the REST username
    password -- bucket (SASL) password
    verbose  -- enable extra logging

    Raises InvalidArgumentException when url or bucket is empty.
    """
    self.log = logger.logger("VBucketAwareMemcachedClient")
    self.bucket = bucket
    self.rest_username = bucket
    self.rest_password = password
    self._memcacheds = {}  # "ip:port" -> memcached connection
    self._vBucketMap = {}  # vbucket id -> master server for that vbucket
    self._vBucketMap_lock = Lock()
    self._vBucketMapFastForward = {}  # forward map published during rebalance
    self._vBucketMapFastForward_lock = Lock()
    #TODO: use regular expressions to parse the url
    server = {}
    if not bucket:
        raise InvalidArgumentException("bucket can not be an empty string",
                                       parameters="bucket")
    if not url:
        raise InvalidArgumentException("url can not be an empty string",
                                       parameters="url")
    # crude slicing of "http://<ip>:<port>/pools/default"; anything else
    # leaves server empty
    if url.find("http://") != -1 and url.rfind(":") != -1 and url.find("/pools/default") != -1:
        server["ip"] = url[url.find("http://") + len("http://"):url.rfind(":")]
        server["port"] = url[url.rfind(":") + 1:url.find("/pools/default")]
        server["username"] = self.rest_username
        server["password"] = self.rest_password
    self.servers = [server]
    self.servers_lock = Lock()
    self.rest = RestConnection(server)
    self.reconfig_vbucket_map()
    self.init_vbucket_connections()
    # daemon threads: the dispatcher serializes memcached operations,
    # the streaming thread keeps the vbucket map fresh
    self.dispatcher = CommandDispatcher(self)
    self.dispatcher_thread = Thread(name="dispatcher-thread",
                                    target=self._start_dispatcher)
    self.dispatcher_thread.daemon = True
    self.dispatcher_thread.start()
    self.streaming_thread = Thread(name="streaming",
                                   target=self._start_streaming, args=())
    self.streaming_thread.daemon = True
    self.streaming_thread.start()
    self.verbose = verbose
def disk_stats(self, nodes, frequency, verbosity=False):
    """Poll disk statistics from every reachable node until aborted.

    Every *frequency* seconds each node's shell is sampled via
    self._extrace_disk_states; snapshots are tagged with the node ip, a
    unique id and the capture time, and published to
    self._task["diskstats"].
    """
    connections = []
    for node in nodes:
        try:
            bucket = RestConnection(node).get_buckets()[0].name
            MemcachedClientHelper.direct_client(node, bucket)
            # keep the node paired with its shell: the original kept two
            # parallel lists, so one unreachable node shifted every
            # later shell onto the wrong node
            connections.append((node, RemoteMachineShellConnection(node)))
        except:
            # best effort — skip nodes we cannot reach
            pass
    d = {"snapshots": []}
    # "pname":"x","pid":"y","snapshots":[{"time":time,"value":value}]
    start_time = str(self._task["time"])
    while not self._aborted():
        time.sleep(frequency)
        for node, shell in connections:
            unique_id = node.ip + '-' + start_time
            value = self._extrace_disk_states(shell)
            value["unique_id"] = unique_id
            value["time"] = time.time()
            value["ip"] = node.ip
            d["snapshots"].append(value)
        self._task["diskstats"] = d["snapshots"]
    self.log.info(" finished diskstats")
class CbClient:
    """Abstract couchbase client.

    Thin convenience wrapper pairing a REST client (for view queries)
    with a memcached client (for document reads/writes).  Documents are
    stored JSON-serialized under str(test_id).
    """

    def __init__(self, host=None, port=None, username=None, password=None):
        """Initialize memcache and couchbase clients"""
        server = {'ip': host or settings.COUCHBASE['HOST'],
                  'port': port or settings.COUCHBASE['PORT'],
                  'username': username or settings.COUCHBASE['USER'],
                  'password': password or settings.COUCHBASE['PASSWORD']}
        self.rest_client = RestConnection(server)
        self.mc_client = memcache.Client([server['ip'] + ':11211'], debug=0)

    def insert(self, test_id, doc=None):
        """Store *doc* (JSON-serialized) under *test_id*.

        doc defaults to None instead of a mutable {} — a shared dict
        default would leak state between calls.
        """
        doc = json.dumps(doc if doc is not None else {})
        return self.mc_client.set(str(test_id), doc)

    def find(self, test_id):
        """Fetch and deserialize the document stored under *test_id*.

        Returns {} when the key is missing or holds invalid JSON.
        """
        try:
            doc = self.mc_client.get(str(test_id))
            return json.loads(doc)
        except (TypeError, ValueError) as error:
            print(error)
            return {}

    def update(self, test_id, doc=None):
        """Merge *doc* into the stored document and write it back."""
        current = self.find(test_id)
        current.update(doc if doc is not None else {})
        return self.insert(test_id, current)

    def query(self, bucket='default', ddoc='', view='', params=None, limit=5000):
        """Run a view query through the REST client."""
        if params is None:
            params = []
        return self.rest_client.view_results(bucket, ddoc, view, params, limit)
def _rest(self):
    """Build a RestConnection for the first known server, with our credentials."""
    # snapshot the server entry under the lock, then work on the copy
    with self.servers_lock:
        server_info = deepcopy(self.servers[0])
    server_info['username'] = self.rest_username
    server_info['password'] = self.rest_password
    return RestConnection(server_info)
def __init__(self, host=None, port=None, username=None, password=None):
    """Initialize memcache and couchbase clients.

    Every argument falls back to the corresponding settings.COUCHBASE
    entry when not given.
    """
    defaults = settings.COUCHBASE
    server = dict(ip=host or defaults['HOST'],
                  port=port or defaults['PORT'],
                  username=username or defaults['USER'],
                  password=password or defaults['PASSWORD'])
    self.rest_client = RestConnection(server)
    # direct memcached connection on the standard port
    self.mc_client = memcache.Client([server['ip'] + ':11211'], debug=0)
def __init__(self, url, bucket, password="", verbose=False):
    """Set up a vbucket-aware memcached client.

    url      -- REST endpoint of the form http://host:port/pools/default
    bucket   -- bucket name (doubles as the REST username)
    password -- bucket (SASL) password
    verbose  -- enable extra logging

    Raises InvalidArgumentException on an empty url or bucket.
    """
    self.log = logger.logger("VBucketAwareMemcachedClient")
    self.bucket = bucket
    self.rest_username = bucket
    self.rest_password = password
    self._memcacheds = {}  # server string -> memcached connection
    self._vBucketMap = {}  # vbucket id -> that vbucket's master server
    self._vBucketMapFastForward = {}  # forward map seen during rebalance
    self._vBucketMap_lock = Lock()
    self._vBucketMapFastForward_lock = Lock()
    #TODO: use regular expressions to parse the url
    server = {}
    if not bucket:
        raise InvalidArgumentException("bucket can not be an empty string",
                                       parameters="bucket")
    if not url:
        raise InvalidArgumentException("url can not be an empty string",
                                       parameters="url")
    # naive slicing of "http://<ip>:<port>/pools/default"
    if url.find("http://") != -1 and url.rfind(":") != -1 and url.find(
            "/pools/default") != -1:
        server["ip"] = url[url.find("http://") + len("http://"):url.rfind(":")]
        server["port"] = url[url.rfind(":") + 1:url.find("/pools/default")]
        server["username"] = self.rest_username
        server["password"] = self.rest_password
    self.servers = [server]
    self.servers_lock = Lock()
    self.rest = RestConnection(server)
    self.reconfig_vbucket_map()
    self.init_vbucket_connections()
    # two daemon helper threads: command dispatch and map streaming
    self.dispatcher = CommandDispatcher(self)
    self.dispatcher_thread = Thread(name="dispatcher-thread",
                                    target=self._start_dispatcher)
    self.dispatcher_thread.daemon = True
    self.dispatcher_thread.start()
    self.streaming_thread = Thread(name="streaming",
                                   target=self._start_streaming, args=())
    self.streaming_thread.daemon = True
    self.streaming_thread.start()
    self.verbose = verbose
def start(self, nodes, bucket, pnames, name, frequency, client_id='',
          collect_server_stats=True):
    """Begin collecting stats for a named test run.

    Initializes self._task bookkeeping and, unless server stats are
    disabled, launches one worker thread per stat family; build and
    machine stats are then gathered once, inline.
    """
    self._task = {"state": "running", "threads": [], "name": name,
                  "time": time.time(), "ops": [], "totalops": [],
                  "ops-temp": [], "latency": {}, "data_size_stats": []}
    rest = RestConnection(nodes[0])
    info = rest.get_nodes_self()
    self.data_path = info.storage[0].get_data_path()
    self.client_id = str(client_id)
    if collect_server_stats:
        mbstats_thread = Thread(target=self.membase_stats,
                                args=(nodes, bucket, frequency, self._verbosity))
        mbstats_thread.start()
        sysstats_thread = Thread(target=self.system_stats,
                                 args=(nodes, pnames, frequency, self._verbosity))
        sysstats_thread.start()
        diskstats_thread = Thread(target=self.disk_stats,
                                  args=(nodes, frequency, self._verbosity))
        diskstats_thread.start()
        # ns_server stats come from a single node, sampled every 10s
        ns_server_stats_thread = Thread(target=self.ns_server_stats,
                                        args=([nodes[0]], bucket, 10, self._verbosity))
        ns_server_stats_thread.start()
        # hand the already-open RestConnection to the sizing thread
        # rather than constructing a duplicate one
        bucket_size_thead = Thread(target=self.get_bucket_size,
                                   args=(bucket, rest, frequency))
        bucket_size_thead.start()
        # NOTE(review): diskstats_thread is intentionally(?) absent from
        # the tracked thread list — confirm
        self._task["threads"] = [sysstats_thread, ns_server_stats_thread,
                                 bucket_size_thead, mbstats_thread]
    # Getting build/machine stats from only one node in the cluster
    self.build_stats(nodes)
    self.machine_stats(nodes)
class CbClient:
    """Abstract couchbase client.

    Couples a REST client (view queries) with a memcached client
    (document storage).  Documents are JSON strings keyed by
    str(test_id).
    """

    def __init__(self, host=None, port=None, username=None, password=None):
        """Initialize memcache and couchbase clients"""
        server = {'ip': host or settings.COUCHBASE['HOST'],
                  'port': port or settings.COUCHBASE['PORT'],
                  'username': username or settings.COUCHBASE['USER'],
                  'password': password or settings.COUCHBASE['PASSWORD']}
        self.rest_client = RestConnection(server)
        self.mc_client = memcache.Client([server['ip'] + ':11211'], debug=0)

    def insert(self, test_id, doc=None):
        """Serialize *doc* to JSON and store it under *test_id*.

        Default is None rather than a mutable {} so no dict instance is
        shared across calls.
        """
        doc = json.dumps(doc if doc is not None else {})
        return self.mc_client.set(str(test_id), doc)

    def find(self, test_id):
        """Return the stored document, or {} when missing/unparseable."""
        try:
            doc = self.mc_client.get(str(test_id))
            return json.loads(doc)
        except (TypeError, ValueError) as error:
            print(error)
            return {}

    def update(self, test_id, doc=None):
        """Merge *doc* into the currently stored document and re-store it."""
        current = self.find(test_id)
        current.update(doc if doc is not None else {})
        return self.insert(test_id, current)

    def query(self, bucket='default', ddoc='', view='', params=None, limit=5000):
        """Run a view query through the REST client."""
        if params is None:
            params = []
        return self.rest_client.view_results(bucket, ddoc, view, params, limit)

    def close(self):
        """Best-effort shutdown of the memcached connection."""
        try:
            self.mc_client.close()
        except Exception:
            # narrowed from a bare except so KeyboardInterrupt is not
            # swallowed; still best-effort during teardown
            pass
def system_stats(self, nodes, pnames, frequency, verbosity=False):
    """Sample per-process info for each name in *pnames* on every node.

    Loops until aborted, sleeping *frequency* seconds between passes.
    Each running process matching a pname yields a snapshot tagged with
    name, pid, node ip and capture time; results are published to
    self._task["systemstats"].
    """
    try:
        connections = []
        for node in nodes:
            try:
                bucket = RestConnection(node).get_buckets()[0].name
                MemcachedClientHelper.direct_client(node, bucket)
                # pair each node with its shell: the original used
                # parallel lists indexed together, so one unreachable
                # node mis-attributed every later shell's samples
                connections.append((node, RemoteMachineShellConnection(node)))
            except:
                # best effort — skip unreachable nodes
                pass
        d = {"snapshots": []}
        # "pname":"x","pid":"y","snapshots":[{"time":time,"value":value}]
        start_time = str(self._task["time"])
        while not self._aborted():
            time.sleep(frequency)
            current_time = time.time()
            for node, shell in connections:
                unique_id = node.ip + '-' + start_time
                for pname in pnames:
                    obj = RemoteMachineHelper(shell).is_process_running(pname)
                    if obj and obj.pid:
                        value = self._extract_proc_info(shell, obj.pid)
                        value["name"] = pname
                        value["id"] = obj.pid
                        value["unique_id"] = unique_id
                        value["time"] = current_time
                        value["ip"] = node.ip
                        d["snapshots"].append(value)
            self._task["systemstats"] = d["snapshots"]
        self.log.info(" finished system_stats")
    except:
        # mirror the file's convention: any failure just re-checks abort
        self._aborted()
def build_info(node):
    """Fetch and parse the /nodes/self document for *node*."""
    rest = RestConnection(node)
    endpoint = rest.baseUrl + 'nodes/self'
    status, content = rest._http_request(endpoint)
    # status is intentionally ignored; callers only want the parsed body
    return json.loads(content)
def membase_stats(self, nodes, bucket, frequency, verbose=False):
    """Poll memcached stat families from every node until aborted.

    Collects the default stats plus 'timings', 'dispatcher', 'tap' and
    'checkpoint' every *frequency* seconds, buffering per host.  After
    the abort flag is set, every buffered snapshot is tagged with a
    unique id / time / ip and appended to the matching self._task list.
    NOTE: the *bucket* parameter is shadowed below by the first bucket
    found on each node.
    """
    try:
        mcs = []
        for node in nodes:
            try:
                bucket = RestConnection(node).get_buckets()[0].name
                mcs.append(
                    MemcachedClientHelper.direct_client(node, bucket))
            except:
                # best effort: unreachable nodes are simply skipped
                pass
        self._task["membasestats"] = []
        self._task["timings"] = []
        self._task["dispatcher"] = []
        self._task["tap"] = []
        self._task["checkpoint"] = []
        d = {}
        # "pname":"x","pid":"y","snapshots":[{"time":time,"value":value}]
        # per-host buffers, flushed into self._task after the loop ends
        for mc in mcs:
            d[mc.host] = {"snapshots": [], "timings": [], "dispatcher": [],
                          "tap": [], "checkpoint": []}
        while not self._aborted():
            time_left = frequency
            # at minimum we want to check for aborted every minute
            while not self._aborted() and time_left > 0:
                time.sleep(min(time_left, 60))
                if time_left >= 60:
                    time_left -= 60
                else:
                    time_left = 0
            for mc in mcs:
                stats = mc.stats()
                stats["time"] = time.time()
                stats["ip"] = mc.host
                d[mc.host]["snapshots"].append(stats)
                timings = mc.stats('timings')
                d[mc.host]["timings"].append(timings)
                dispatcher = mc.stats('dispatcher')
                d[mc.host]["dispatcher"].append(dispatcher)
                tap = mc.stats('tap')
                d[mc.host]["tap"].append(tap)
                checkpoint = mc.stats('checkpoint')
                d[mc.host]["checkpoint"].append(checkpoint)
                print len(d[mc.host]["checkpoint"])
        # post-processing: stamp every buffered snapshot and move it
        # into the shared task dictionary
        start_time = str(self._task["time"])
        for mc in mcs:
            ip = mc.host
            unique_id = ip + '-' + start_time
            current_time = time.time()
            for snapshot in d[mc.host]["snapshots"]:
                snapshot['unique_id'] = unique_id
                snapshot['time'] = current_time
                snapshot['ip'] = ip
                self._task["membasestats"].append(snapshot)
            for timing in d[mc.host]["timings"]:
                timing['unique_id'] = unique_id
                timing['time'] = current_time
                timing['ip'] = ip
                self._task["timings"].append(timing)
            for dispatcher in d[mc.host]["dispatcher"]:
                dispatcher['unique_id'] = unique_id
                dispatcher['time'] = current_time
                dispatcher['ip'] = ip
                self._task["dispatcher"].append(dispatcher)
            for tap in d[mc.host]["tap"]:
                tap['unique_id'] = unique_id
                tap['time'] = current_time
                tap['ip'] = ip
                self._task["tap"].append(tap)
            for checkpoint in d[mc.host]["checkpoint"]:
                checkpoint['unique_id'] = unique_id
                checkpoint['time'] = current_time
                checkpoint['ip'] = ip
                self._task["checkpoint"].append(checkpoint)
        self.log.info(" finished membase_stats")
    except:
        # file-wide convention: swallow errors and re-check abort state
        self._aborted()
class VBucketAwareCouchbaseClient(object):
    """Memcached client that routes each key to the node owning its vbucket.

    A background "streaming" thread follows the cluster's bucket stream
    and refreshes the vbucket map; a "dispatcher" thread executes the
    queued memcached operations.  Public data methods (get/set/...)
    enqueue an item for the dispatcher and block in _respond() until the
    operation completes or times out.  Hashing uses the project's
    crc32.crc32_hash helper.
    """
    #poll server every few seconds to see if the vbucket-map
    #has changes

    def __init__(self, url, bucket, password="", verbose=False):
        """url is http://host:port/pools/default; bucket doubles as REST user."""
        self.log = logger.logger("VBucketAwareMemcachedClient")
        self.bucket = bucket
        self.rest_username = bucket
        self.rest_password = password
        self._memcacheds = {}  # "ip:port" -> memcached connection
        self._vBucketMap = {}  # vbucket id -> "ip:port" of its master
        self._vBucketMap_lock = Lock()
        self._vBucketMapFastForward = {}  # forward map during rebalance
        self._vBucketMapFastForward_lock = Lock()
        #TODO: use regular expressions to parse the url
        server = {}
        if not bucket:
            raise InvalidArgumentException("bucket can not be an empty string",
                                           parameters="bucket")
        if not url:
            raise InvalidArgumentException("url can not be an empty string",
                                           parameters="url")
        # crude slicing of "http://<ip>:<port>/pools/default"
        if url.find("http://") != -1 and url.rfind(":") != -1 and url.find("/pools/default") != -1:
            server["ip"] = url[url.find("http://") + len("http://"):url.rfind(":")]
            server["port"] = url[url.rfind(":") + 1:url.find("/pools/default")]
            server["username"] = self.rest_username
            server["password"] = self.rest_password
        self.servers = [server]
        self.servers_lock = Lock()
        self.rest = RestConnection(server)
        self.reconfig_vbucket_map()
        self.init_vbucket_connections()
        # daemon threads: dispatcher serializes ops, streaming keeps the
        # vbucket map current
        self.dispatcher = CommandDispatcher(self)
        self.dispatcher_thread = Thread(name="dispatcher-thread",
                                        target=self._start_dispatcher)
        self.dispatcher_thread.daemon = True
        self.dispatcher_thread.start()
        self.streaming_thread = Thread(name="streaming",
                                       target=self._start_streaming, args=())
        self.streaming_thread.daemon = True
        self.streaming_thread.start()
        self.verbose = verbose

    def _start_dispatcher(self):
        """Thread target: run the dispatcher's main loop."""
        self.dispatcher.dispatch()

    def _start_streaming(self):
        # this will dynamically update vBucketMap, vBucketMapFastForward, servers
        urlopener = urllib.FancyURLopener()
        urlopener.prompt_user_passwd = lambda host, realm: (self.rest_username,
                                                            self.rest_password)
        current_servers = True
        while current_servers:
            self.servers_lock.acquire()
            current_servers = deepcopy(self.servers)
            self.servers_lock.release()
            for server in current_servers:
                # long-poll the bucketsStreaming feed; each line is a
                # full JSON description of the bucket topology
                response = urlopener.open(
                    "http://{0}:{1}/pools/default/bucketsStreaming/{2}".format(
                        server["ip"], server["port"], self.bucket))
                while response:
                    try:
                        line = response.readline()
                        if not line:
                            # try next server if we get an EOF
                            response.close()
                            break
                    except:
                        # try next server if we fail to read
                        response.close()
                        break
                    try:
                        data = json.loads(line)
                    except:
                        # skip keep-alive / partial lines
                        continue
                    serverlist = data['vBucketServerMap']['serverList']
                    vbucketmapfastforward = {}
                    index = 0
                    if 'vBucketMapForward' in data['vBucketServerMap']:
                        for vbucket in data['vBucketServerMap']['vBucketMapForward']:
                            vbucketmapfastforward[index] = serverlist[vbucket[0]]
                            index += 1
                        self._vBucketMapFastForward_lock.acquire()
                        self._vBucketMapFastForward = deepcopy(vbucketmapfastforward)
                        self._vBucketMapFastForward_lock.release()
                    vbucketmap = {}
                    index = 0
                    for vbucket in data['vBucketServerMap']['vBucketMap']:
                        vbucketmap[index] = serverlist[vbucket[0]]
                        index += 1
                    # only update vBucketMap if we don't have a fastforward
                    # on a not_mb_vbucket error, we already update the
                    # vBucketMap from the fastforward map
                    if not vbucketmapfastforward:
                        self._vBucketMap_lock.acquire()
                        self._vBucketMap = deepcopy(vbucketmap)
                        self._vBucketMap_lock.release()
                    new_servers = []
                    nodes = data["nodes"]
                    for node in nodes:
                        if node["clusterMembership"] == "active" and node["status"] == "healthy":
                            hostport = node["hostname"]
                            new_servers.append({"ip": hostport.split(":")[0],
                                                "port": int(hostport.split(":")[1]),
                                                "username": self.rest_username,
                                                "password": self.rest_password})
                    new_servers.sort()
                    self.servers_lock.acquire()
                    self.servers = deepcopy(new_servers)
                    self.servers_lock.release()

    def init_vbucket_connections(self):
        # start up all vbucket connections
        self._vBucketMap_lock.acquire()
        vbucketcount = len(self._vBucketMap)
        self._vBucketMap_lock.release()
        for i in range(vbucketcount):
            self.start_vbucket_connection(i)

    def start_vbucket_connection(self, vbucket):
        """Open (once) a memcached connection to the master of *vbucket*."""
        self._vBucketMap_lock.acquire()
        server = deepcopy(self._vBucketMap[vbucket])
        self._vBucketMap_lock.release()
        serverIp, serverPort = server.split(":")
        if not server in self._memcacheds:
            self._memcacheds[server] = MemcachedClientHelper.direct_client(
                self.rest, serverIp, serverPort, self.bucket)

    def start_vbucket_fastforward_connection(self, vbucket):
        """Open a connection to the fast-forward master of *vbucket*, if any."""
        self._vBucketMapFastForward_lock.acquire()
        if not vbucket in self._vBucketMapFastForward:
            self._vBucketMapFastForward_lock.release()
            return
        server = deepcopy(self._vBucketMapFastForward[vbucket])
        self._vBucketMapFastForward_lock.release()
        serverIp, serverPort = server.split(":")
        if not server in self._memcacheds:
            self._memcacheds[server] = MemcachedClientHelper.direct_client(
                self.rest, serverIp, serverPort, self.bucket)

    def restart_vbucket_connection(self, vbucket):
        """Close and re-open the connection to the master of *vbucket*."""
        self._vBucketMap_lock.acquire()
        server = deepcopy(self._vBucketMap[vbucket])
        self._vBucketMap_lock.release()
        serverIp, serverPort = server.split(":")
        if server in self._memcacheds:
            self._memcacheds[server].close()
        self._memcacheds[server] = MemcachedClientHelper.direct_client(
            self.rest, serverIp, serverPort, self.bucket)

    def reconfig_vbucket_map(self, vbucket=-1):
        """Refresh the vbucket->master map via REST (-1 means all vbuckets)."""
        vb_ready = RestHelper(self.rest).vbucket_map_ready(self.bucket, 60)
        if not vb_ready:
            raise Exception("vbucket map is not ready for bucket {0}".format(self.bucket))
        vBuckets = self.rest.get_vbuckets(self.bucket)
        self.vbucket_count = len(vBuckets)
        bucket_info = self.rest.get_bucket(self.bucket)
        nodes = bucket_info.nodes
        self._vBucketMap_lock.acquire()
        for vBucket in vBuckets:
            if vBucket.id == vbucket or vbucket == -1:
                self._vBucketMap[vBucket.id] = vBucket.master
        self._vBucketMap_lock.release()

    def memcached(self, key, fastforward=False):
        """Return the memcached connection responsible for *key*.

        Raises Exception when the vbucket or its connection is unknown.
        """
        self._vBucketMap_lock.acquire()
        self._vBucketMapFastForward_lock.acquire()
        # mask acts as modulo — assumes the vbucket count is a power of
        # two (standard for couchbase buckets; confirm for this cluster)
        vBucketId = crc32.crc32_hash(key) & (len(self._vBucketMap) - 1)
        if fastforward and vBucketId in self._vBucketMapFastForward:
            # only try the fastforward if we have an entry
            # otherwise we just wait for the main map to update
            self.start_vbucket_fastforward_connection(vBucketId)
            self._vBucketMap[vBucketId] = self._vBucketMapFastForward[vBucketId]
        if vBucketId not in self._vBucketMap:
            msg = "vbucket map does not have an entry for vb : {0}"
            self._vBucketMapFastForward_lock.release()
            self._vBucketMap_lock.release()
            raise Exception(msg.format(vBucketId))
        if self._vBucketMap[vBucketId] not in self._memcacheds:
            msg = "smart client does not have a mc connection for server : {0}"
            self._vBucketMapFastForward_lock.release()
            self._vBucketMap_lock.release()
            raise Exception(msg.format(self._vBucketMap[vBucketId]))
        r = self._memcacheds[self._vBucketMap[vBucketId]]
        self._vBucketMapFastForward_lock.release()
        self._vBucketMap_lock.release()
        return r

    def vbucketid(self, key):
        """Return the vbucket id that *key* hashes to."""
        self._vBucketMap_lock.acquire()
        r = crc32.crc32_hash(key) & (len(self._vBucketMap) - 1)
        self._vBucketMap_lock.release()
        return r

    def done(self):
        """Shut down the dispatcher and close every memcached connection."""
        if self.dispatcher:
            self.dispatcher.shutdown()
            if self.verbose:
                self.log.info("dispatcher shutdown invoked")
            [self._memcacheds[ip].close() for ip in self._memcacheds]
            if self.verbose:
                self.log.info("closed all memcached open connections")
            self.dispatcher = None

    def _respond(self, item, event):
        """Block until the dispatcher answers *item*; raise on error/timeout."""
        timeout = 30
        event.wait(timeout)
        if not event.is_set():
            # if we timeout, then try to reconnect to the server
            # responsible for this vbucket
            self.restart_vbucket_connection(self.vbucketid(item['key']))
            raise MemcachedTimeoutException(item, timeout)
        if "error" in item["response"]:
            raise item["response"]["error"]
        return item["response"]["return"]

    def get(self, key):
        event = Event()
        item = {"operation": "get", "key": key, "event": event,
                "response": {}}
        self.dispatcher.put(item)
        return self._respond(item, event)

    def gat(self, key, expiry):
        event = Event()
        item = {"operation": "gat", "key": key, "expiry": expiry,
                "event": event, "response": {}}
        self.dispatcher.put(item)
        return self._respond(item, event)

    def touch(self, key, expiry):
        event = Event()
        item = {"operation": "touch", "key": key, "expiry": expiry,
                "event": event, "response": {}}
        self.dispatcher.put(item)
        return self._respond(item, event)

    def cas(self, key, expiry, flags, old_value, value):
        event = Event()
        item = {"operation": "cas", "key": key, "expiry": expiry,
                "flags": flags, "old_value": old_value, "value": value,
                "event": event, "response": {}}
        self.dispatcher.put(item)
        return self._respond(item, event)

    def decr(self, key, amount=1, init=0, expiry=0):
        event = Event()
        item = {"operation": "decr", "key": key, "amount": amount,
                "init": init, "expiry": expiry, "event": event,
                "response": {}}
        self.dispatcher.put(item)
        return self._respond(item, event)

    def set(self, key, expiry, flags, value):
        event = Event()
        item = {"operation": "set", "key": key, "expiry": expiry,
                "flags": flags, "value": value, "event": event,
                "response": {}}
        self.dispatcher.put(item)
        return self._respond(item, event)

    def add(self, key, expiry, flags, value):
        event = Event()
        item = {"operation": "add", "key": key, "expiry": expiry,
                "flags": flags, "value": value, "event": event,
                "response": {}}
        self.dispatcher.put(item)
        return self._respond(item, event)

    def delete(self, key, cas=0):
        event = Event()
        item = {"operation": "delete", "key": key, "cas": cas,
                "event": event, "response": {}}
        self.dispatcher.put(item)
        return self._respond(item, event)

    def prepend(self, key, value, cas=0):
        event = Event()
        item = {"operation": "prepend", "key": key, "cas": cas,
                "value": value, "event": event, "response": {}}
        self.dispatcher.put(item)
        return self._respond(item, event)

    def getl(self, key, expiry=15):
        event = Event()
        item = {"operation": "getl", "key": key, "expiry": expiry,
                "event": event, "response": {}}
        self.dispatcher.put(item)
        return self._respond(item, event)

    def replace(self, key, expiry, flags, value):
        event = Event()
        item = {"operation": "replace", "key": key, "expiry": expiry,
                "flags": flags, "value": value, "event": event,
                "response": {}}
        self.dispatcher.put(item)
        return self._respond(item, event)

    def incr(self, key, amount=1, init=0, expiry=0):
        event = Event()
        item = {"operation": "incr", "key": key, "amount": amount,
                "init": init, "expiry": expiry, "event": event,
                "response": {}}
        self.dispatcher.put(item)
        return self._respond(item, event)
class VBucketAwareCouchbaseClient(object):
    """Vbucket-aware memcached client (zlib.crc32 hashing variant).

    Same architecture as the crc32_hash-based client: a daemon
    "streaming" thread follows the cluster bucket stream to keep the
    vbucket map fresh, and a daemon "dispatcher" thread executes queued
    memcached operations.  Public ops enqueue an item and block in
    _respond().  This variant hashes keys with (zlib.crc32(key) >> 16)
    and also provides an append() operation.
    """
    #poll server every few seconds to see if the vbucket-map
    #has changes

    def __init__(self, url, bucket, password="", verbose=False):
        """url is http://host:port/pools/default; bucket doubles as REST user."""
        self.log = logger.logger("VBucketAwareMemcachedClient")
        self.bucket = bucket
        self.rest_username = bucket
        self.rest_password = password
        self._memcacheds = {}  # "ip:port" -> memcached connection
        self._vBucketMap = {}  # vbucket id -> "ip:port" of its master
        self._vBucketMap_lock = Lock()
        self._vBucketMapFastForward = {}  # forward map during rebalance
        self._vBucketMapFastForward_lock = Lock()
        #TODO: use regular expressions to parse the url
        server = {}
        if not bucket:
            raise InvalidArgumentException("bucket can not be an empty string",
                                           parameters="bucket")
        if not url:
            raise InvalidArgumentException("url can not be an empty string",
                                           parameters="url")
        # naive slicing of "http://<ip>:<port>/pools/default"
        if url.find("http://") != -1 and url.rfind(":") != -1 and url.find(
                "/pools/default") != -1:
            server["ip"] = url[url.find("http://") + len("http://"):url.rfind(":")]
            server["port"] = url[url.rfind(":") + 1:url.find("/pools/default")]
            server["username"] = self.rest_username
            server["password"] = self.rest_password
        self.servers = [server]
        self.servers_lock = Lock()
        self.rest = RestConnection(server)
        self.reconfig_vbucket_map()
        self.init_vbucket_connections()
        # daemon helper threads: op dispatch and topology streaming
        self.dispatcher = CommandDispatcher(self)
        self.dispatcher_thread = Thread(name="dispatcher-thread",
                                        target=self._start_dispatcher)
        self.dispatcher_thread.daemon = True
        self.dispatcher_thread.start()
        self.streaming_thread = Thread(name="streaming",
                                       target=self._start_streaming, args=())
        self.streaming_thread.daemon = True
        self.streaming_thread.start()
        self.verbose = verbose

    def _start_dispatcher(self):
        """Thread target: run the dispatcher's main loop."""
        self.dispatcher.dispatch()

    def _start_streaming(self):
        # this will dynamically update vBucketMap, vBucketMapFastForward, servers
        urlopener = urllib.FancyURLopener()
        urlopener.prompt_user_passwd = lambda host, realm: (self.rest_username,
                                                            self.rest_password)
        current_servers = True
        while current_servers:
            self.servers_lock.acquire()
            current_servers = deepcopy(self.servers)
            self.servers_lock.release()
            for server in current_servers:
                # long-poll the bucketsStreaming feed; each line is a
                # complete JSON topology document
                response = urlopener.open(
                    "http://{0}:{1}/pools/default/bucketsStreaming/{2}".format(
                        server["ip"], server["port"], self.bucket))
                while response:
                    try:
                        line = response.readline()
                        if not line:
                            # try next server if we get an EOF
                            response.close()
                            break
                    except:
                        # try next server if we fail to read
                        response.close()
                        break
                    try:
                        data = json.loads(line)
                    except:
                        # ignore keep-alive / partial lines
                        continue
                    serverlist = data['vBucketServerMap']['serverList']
                    vbucketmapfastforward = {}
                    index = 0
                    if 'vBucketMapForward' in data['vBucketServerMap']:
                        for vbucket in data['vBucketServerMap']['vBucketMapForward']:
                            vbucketmapfastforward[index] = serverlist[vbucket[0]]
                            index += 1
                        self._vBucketMapFastForward_lock.acquire()
                        self._vBucketMapFastForward = deepcopy(vbucketmapfastforward)
                        self._vBucketMapFastForward_lock.release()
                    vbucketmap = {}
                    index = 0
                    for vbucket in data['vBucketServerMap']['vBucketMap']:
                        vbucketmap[index] = serverlist[vbucket[0]]
                        index += 1
                    # only update vBucketMap if we don't have a fastforward
                    # on a not_mb_vbucket error, we already update the
                    # vBucketMap from the fastforward map
                    if not vbucketmapfastforward:
                        self._vBucketMap_lock.acquire()
                        self._vBucketMap = deepcopy(vbucketmap)
                        self._vBucketMap_lock.release()
                    new_servers = []
                    nodes = data["nodes"]
                    for node in nodes:
                        if node["clusterMembership"] == "active" and node["status"] == "healthy":
                            hostport = node["hostname"]
                            new_servers.append({
                                "ip": hostport.split(":")[0],
                                "port": int(hostport.split(":")[1]),
                                "username": self.rest_username,
                                "password": self.rest_password
                            })
                    new_servers.sort()
                    self.servers_lock.acquire()
                    self.servers = deepcopy(new_servers)
                    self.servers_lock.release()

    def init_vbucket_connections(self):
        # start up all vbucket connections
        self._vBucketMap_lock.acquire()
        vbucketcount = len(self._vBucketMap)
        self._vBucketMap_lock.release()
        for i in range(vbucketcount):
            self.start_vbucket_connection(i)

    def start_vbucket_connection(self, vbucket):
        """Open (once) a memcached connection to the master of *vbucket*."""
        self._vBucketMap_lock.acquire()
        server = deepcopy(self._vBucketMap[vbucket])
        self._vBucketMap_lock.release()
        serverIp, serverPort = server.split(":")
        if not server in self._memcacheds:
            self._memcacheds[server] = MemcachedClientHelper.direct_client(
                self.rest, serverIp, serverPort, self.bucket)

    def start_vbucket_fastforward_connection(self, vbucket):
        """Open a connection to the fast-forward master of *vbucket*, if any."""
        self._vBucketMapFastForward_lock.acquire()
        if not vbucket in self._vBucketMapFastForward:
            self._vBucketMapFastForward_lock.release()
            return
        server = deepcopy(self._vBucketMapFastForward[vbucket])
        self._vBucketMapFastForward_lock.release()
        serverIp, serverPort = server.split(":")
        if not server in self._memcacheds:
            self._memcacheds[server] = MemcachedClientHelper.direct_client(
                self.rest, serverIp, serverPort, self.bucket)

    def restart_vbucket_connection(self, vbucket):
        """Close and re-open the connection to the master of *vbucket*."""
        self._vBucketMap_lock.acquire()
        server = deepcopy(self._vBucketMap[vbucket])
        self._vBucketMap_lock.release()
        serverIp, serverPort = server.split(":")
        if server in self._memcacheds:
            self._memcacheds[server].close()
        self._memcacheds[server] = MemcachedClientHelper.direct_client(
            self.rest, serverIp, serverPort, self.bucket)

    def reconfig_vbucket_map(self, vbucket=-1):
        """Refresh the vbucket->master map via REST (-1 means all vbuckets)."""
        vb_ready = RestHelper(self.rest).vbucket_map_ready(self.bucket, 60)
        if not vb_ready:
            raise Exception("vbucket map is not ready for bucket {0}".format(
                self.bucket))
        vBuckets = self.rest.get_vbuckets(self.bucket)
        self.vbucket_count = len(vBuckets)
        bucket_info = self.rest.get_bucket(self.bucket)
        nodes = bucket_info.nodes
        self._vBucketMap_lock.acquire()
        for vBucket in vBuckets:
            if vBucket.id == vbucket or vbucket == -1:
                self._vBucketMap[vBucket.id] = vBucket.master
        self._vBucketMap_lock.release()

    def memcached(self, key, fastforward=False):
        """Return the memcached connection responsible for *key*.

        Raises Exception when the vbucket or its connection is unknown.
        NOTE(review): zlib.crc32 requires bytes in Python 3 — confirm
        callers pass byte keys if this module is ported.
        """
        self._vBucketMap_lock.acquire()
        self._vBucketMapFastForward_lock.acquire()
        # mask acts as modulo — assumes the vbucket count is a power of
        # two (standard for couchbase buckets; confirm for this cluster)
        vBucketId = (zlib.crc32(key) >> 16) & (len(self._vBucketMap) - 1)
        if fastforward and vBucketId in self._vBucketMapFastForward:
            # only try the fastforward if we have an entry
            # otherwise we just wait for the main map to update
            self.start_vbucket_fastforward_connection(vBucketId)
            self._vBucketMap[vBucketId] = self._vBucketMapFastForward[vBucketId]
        if vBucketId not in self._vBucketMap:
            msg = "vbucket map does not have an entry for vb : {0}"
            self._vBucketMapFastForward_lock.release()
            self._vBucketMap_lock.release()
            raise Exception(msg.format(vBucketId))
        if self._vBucketMap[vBucketId] not in self._memcacheds:
            msg = "smart client does not have a mc connection for server : {0}"
            self._vBucketMapFastForward_lock.release()
            self._vBucketMap_lock.release()
            raise Exception(msg.format(self._vBucketMap[vBucketId]))
        r = self._memcacheds[self._vBucketMap[vBucketId]]
        self._vBucketMapFastForward_lock.release()
        self._vBucketMap_lock.release()
        return r

    def vbucketid(self, key):
        """Return the vbucket id that *key* hashes to."""
        self._vBucketMap_lock.acquire()
        r = (zlib.crc32(key) >> 16) & (len(self._vBucketMap) - 1)
        self._vBucketMap_lock.release()
        return r

    def done(self):
        """Shut down the dispatcher and close every memcached connection."""
        if self.dispatcher:
            self.dispatcher.shutdown()
            if self.verbose:
                self.log.info("dispatcher shutdown invoked")
            [self._memcacheds[ip].close() for ip in self._memcacheds]
            if self.verbose:
                self.log.info("closed all memcached open connections")
            self.dispatcher = None

    def _respond(self, item, event):
        """Block until the dispatcher answers *item*; raise on error/timeout."""
        timeout = 30
        event.wait(timeout)
        if not event.is_set():
            # if we timeout, then try to reconnect to the server
            # responsible for this vbucket
            self.restart_vbucket_connection(self.vbucketid(item['key']))
            raise MemcachedTimeoutException(item, timeout)
        if "error" in item["response"]:
            raise item["response"]["error"]
        return item["response"]["return"]

    def get(self, key):
        event = Event()
        item = {"operation": "get", "key": key, "event": event,
                "response": {}}
        self.dispatcher.put(item)
        return self._respond(item, event)

    def gat(self, key, expiry):
        event = Event()
        item = {
            "operation": "gat",
            "key": key,
            "expiry": expiry,
            "event": event,
            "response": {}
        }
        self.dispatcher.put(item)
        return self._respond(item, event)

    def touch(self, key, expiry):
        event = Event()
        item = {
            "operation": "touch",
            "key": key,
            "expiry": expiry,
            "event": event,
            "response": {}
        }
        self.dispatcher.put(item)
        return self._respond(item, event)

    def cas(self, key, expiry, flags, old_value, value):
        event = Event()
        item = {
            "operation": "cas",
            "key": key,
            "expiry": expiry,
            "flags": flags,
            "old_value": old_value,
            "value": value,
            "event": event,
            "response": {}
        }
        self.dispatcher.put(item)
        return self._respond(item, event)

    def decr(self, key, amount=1, init=0, expiry=0):
        event = Event()
        item = {
            "operation": "decr",
            "key": key,
            "amount": amount,
            "init": init,
            "expiry": expiry,
            "event": event,
            "response": {}
        }
        self.dispatcher.put(item)
        return self._respond(item, event)

    def set(self, key, expiry, flags, value):
        event = Event()
        item = {
            "operation": "set",
            "key": key,
            "expiry": expiry,
            "flags": flags,
            "value": value,
            "event": event,
            "response": {}
        }
        self.dispatcher.put(item)
        return self._respond(item, event)

    def add(self, key, expiry, flags, value):
        event = Event()
        item = {
            "operation": "add",
            "key": key,
            "expiry": expiry,
            "flags": flags,
            "value": value,
            "event": event,
            "response": {}
        }
        self.dispatcher.put(item)
        return self._respond(item, event)

    def append(self, key, value, cas=0):
        event = Event()
        item = {
            "operation": "append",
            "key": key,
            "cas": cas,
            "value": value,
            "event": event,
            "response": {}
        }
        self.dispatcher.put(item)
        return self._respond(item, event)

    def delete(self, key, cas=0):
        event = Event()
        item = {
            "operation": "delete",
            "key": key,
            "cas": cas,
            "event": event,
            "response": {}
        }
        self.dispatcher.put(item)
        return self._respond(item, event)

    def prepend(self, key, value, cas=0):
        event = Event()
        item = {
            "operation": "prepend",
            "key": key,
            "cas": cas,
            "value": value,
            "event": event,
            "response": {}
        }
        self.dispatcher.put(item)
        return self._respond(item, event)

    def getl(self, key, expiry=15):
        event = Event()
        item = {
            "operation": "getl",
            "key": key,
            "expiry": expiry,
            "event": event,
            "response": {}
        }
        self.dispatcher.put(item)
        return self._respond(item, event)

    def replace(self, key, expiry, flags, value):
        event = Event()
        item = {
            "operation": "replace",
            "key": key,
            "expiry": expiry,
            "flags": flags,
            "value": value,
            "event": event,
            "response": {}
        }
        self.dispatcher.put(item)
        return self._respond(item, event)

    def incr(self, key, amount=1, init=0, expiry=0):
        event = Event()
        item = {
            "operation": "incr",
            "key": key,
            "amount": amount,
            "init": init,
            "expiry": expiry,
            "event": event,
            "response": {}
        }
        self.dispatcher.put(item)
        return self._respond(item, event)