def on_children_change(children):
    """ZooKeeper children-watch callback: diff members and queue hooks.

    Compares the new child list against the last-known membership of
    ``group_id`` and queues a watcher callback for every join/leave.
    Returns ``False`` to stop watching once no hooks remain registered
    for the group.
    """
    # If we don't have any hook, stop watching
    if not self._has_hooks_for_group(group_id):
        return False
    children = set(children)
    last_children = self._group_members[group_id]
    for member_id in (children - last_children):
        # Copy the hook list in case it's removed later, and bind the
        # loop variables as lambda defaults: a plain closure is
        # late-bound, so every queued watcher would otherwise fire with
        # the `hooks`/`member_id` values of the *last* iteration.
        hooks = copy.copy(self._hooks_join_group[group_id])
        self._watchers.append(
            lambda hooks=hooks, member_id=member_id: hooks.run(
                coordination.MemberJoinedGroup(
                    group_id, utils.to_binary(member_id))))
    for member_id in (last_children - children):
        # Same default-argument binding as above, for leave hooks.
        hooks = copy.copy(self._hooks_leave_group[group_id])
        self._watchers.append(
            lambda hooks=hooks, member_id=member_id: hooks.run(
                coordination.MemberLeftGroup(
                    group_id, utils.to_binary(member_id))))
    self._group_members[group_id] = children
def __init__(self, member_id, parsed_url, options):
    """Initialize the Redis driver from a parsed coordination URL."""
    super(RedisDriver, self).__init__()
    options = utils.collapse(options, exclude=self.CLIENT_LIST_ARGS)
    self._parsed_url = parsed_url
    self._options = options
    encoding = options.get('encoding', self.DEFAULT_ENCODING)
    self._encoding = encoding
    # The raw (possibly string) timeout is reused as the default for
    # the membership timeout before conversion.
    socket_timeout = options.get('timeout', self.CLIENT_DEFAULT_SOCKET_TO)
    self.timeout = int(socket_timeout)
    self.membership_timeout = float(
        options.get('membership_timeout', socket_timeout))
    self.lock_timeout = int(options.get('lock_timeout', self.timeout))
    namespace = utils.to_binary(
        options.get('namespace', self.DEFAULT_NAMESPACE),
        encoding=encoding)
    self._namespace = namespace
    self._group_prefix = namespace + b"_group"
    self._beat_prefix = namespace + b"_beats"
    self._groups = namespace + b"_groups"
    self._client = None
    self._member_id = utils.to_binary(member_id, encoding=encoding)
    self._acquired_locks = set()
    self._joined_groups = set()
    self._executor = utils.ProxyExecutor.build("Redis", options)
    self._started = False
    self._server_info = {}
    self._scripts = {}
def __init__(self, member_id, parsed_url, options):
    # Options may arrive as lists of query-string values;
    # utils.collapse flattens them, except CLIENT_LIST_ARGS which the
    # redis client expects to stay as lists.
    super(RedisDriver, self).__init__()
    options = utils.collapse(options, exclude=self.CLIENT_LIST_ARGS)
    self._parsed_url = parsed_url
    self._options = options
    self._encoding = options.get('encoding', self.DEFAULT_ENCODING)
    # Socket timeout (seconds) for the redis client.
    timeout = options.get('timeout', self.CLIENT_DEFAULT_SOCKET_TO)
    self.timeout = int(timeout)
    # How long before a member that stops heartbeating is considered
    # gone; defaults to the raw socket timeout value.
    self.membership_timeout = float(options.get(
        'membership_timeout', timeout))
    lock_timeout = options.get('lock_timeout', self.timeout)
    self.lock_timeout = int(lock_timeout)
    namespace = options.get('namespace', self.DEFAULT_NAMESPACE)
    # All redis keys produced by this driver carry this bytes prefix.
    self._namespace = utils.to_binary(namespace, encoding=self._encoding)
    self._group_prefix = self._namespace + b"_group"
    self._beat_prefix = self._namespace + b"_beats"
    self._groups = self._namespace + b"_groups"
    # Real client is created lazily elsewhere (presumably in start()
    # -- not visible in this chunk).
    self._client = None
    self._member_id = utils.to_binary(member_id, encoding=self._encoding)
    self._acquired_locks = set()
    self._joined_groups = set()
    self._executor = utils.ProxyExecutor.build("Redis", options)
    self._started = False
    self._server_info = {}
    self._scripts = {}
def _update_group_metadata(self, path, group_id):
    """Serialize metadata for *group_id* and write it to *path*."""
    binary_id = utils.to_binary(group_id, encoding="utf8")
    payload = utils.dumps({
        # Record whether encoding altered the caller-supplied id so it
        # can be recovered faithfully when read back.
        u'group_id': binary_id,
        u'encoded': binary_id != group_id,
    })
    with open(path, "wb") as meta_file:
        meta_file.write(payload)
def _update_group_metadata(self, path, group_id):
    """Write group metadata to *path* atomically via a temp file.

    The blob is written to a fresh temp file in the driver's tmpdir and
    then renamed over *path*, so concurrent readers never observe a
    partially written file.
    """
    details = {u'group_id': utils.to_binary(group_id, encoding="utf8")}
    # True when encoding changed the original id (it was not utf8 text).
    details[u'encoded'] = details[u"group_id"] != group_id
    details_blob = utils.dumps(details)
    # NOTE(review): rename is only atomic when source and destination
    # share a filesystem -- presumably self._tmpdir is chosen for that;
    # confirm against the driver's setup code.
    fd, name = tempfile.mkstemp("tooz", dir=self._tmpdir)
    with os.fdopen(fd, "wb") as fh:
        fh.write(details_blob)
    os.rename(name, path)
def _update_group_metadata(self, path, group_id):
    """Atomically persist metadata for *group_id* at *path*."""
    binary_id = utils.to_binary(group_id, encoding="utf8")
    blob = utils.dumps({
        u'group_id': binary_id,
        u'encoded': binary_id != group_id,
    })
    # Write to a temp file first, then rename into place so readers
    # never see a half-written blob.
    tmp_fd, tmp_name = tempfile.mkstemp("tooz", dir=self._tmpdir)
    with os.fdopen(tmp_fd, "wb") as tmp_file:
        tmp_file.write(blob)
    os.rename(tmp_name, path)
def on_children_change(children):
    """Watch callback: queue join/leave hooks for membership changes.

    Diffs ``children`` against the last-known members of ``group_id``.
    Returns ``False`` to cancel the watch when no hooks remain.
    """
    # If we don't have any hook, stop watching
    if not self._has_hooks_for_group(group_id):
        return False
    children = set(children)
    last_children = self._group_members[group_id]
    for member_id in (children - last_children):
        # Copy the hook list in case it's removed later. Bind the loop
        # variables through lambda defaults -- closures are late-bound,
        # so without this every watcher would run with the last
        # iteration's `hooks` and `member_id`.
        hooks = copy.copy(self._hooks_join_group[group_id])
        self._watchers.append(lambda hooks=hooks, member_id=member_id:
                              hooks.run(coordination.MemberJoinedGroup(
                                  group_id, utils.to_binary(member_id))))
    for member_id in (last_children - children):
        # Same default-argument binding for the leave hooks.
        hooks = copy.copy(self._hooks_leave_group[group_id])
        self._watchers.append(lambda hooks=hooks, member_id=member_id:
                              hooks.run(coordination.MemberLeftGroup(
                                  group_id, utils.to_binary(member_id))))
    self._group_members[group_id] = children
def _do_join_group():
    """Create this member's file under the group directory (closure).

    Raises GroupNotCreated when the group directory is missing and
    MemberAlreadyExist when this member's file is already present.
    NOTE(review): isfile-then-open is check-then-act; two racing
    processes could both pass the isfile test -- presumably acceptable
    for this file driver, confirm.
    """
    if not os.path.isdir(group_dir):
        raise coordination.GroupNotCreated(group_id)
    if os.path.isfile(me_path):
        raise coordination.MemberAlreadyExist(group_id, self._member_id)
    details = {
        u'capabilities': capabilities,
        # Naive local timestamp (not timezone-aware).
        u'joined_on': datetime.datetime.now(),
        u'member_id': utils.to_binary(self._member_id, encoding="utf-8")
    }
    # True when the member id was not utf-8 text to begin with.
    details[u'encoded'] = details[u"member_id"] != self._member_id
    details_blob = utils.dumps(details)
    with open(me_path, "wb") as fh:
        fh.write(details_blob)
    self._joined_groups.add(group_id)
def _do_join_group():
    """Write this member's file into the group directory (closure)."""
    # A group only exists once its ".metadata" marker file is present.
    if not os.path.exists(os.path.join(group_dir, ".metadata")):
        raise coordination.GroupNotCreated(group_id)
    if os.path.isfile(me_path):
        raise coordination.MemberAlreadyExist(group_id, self._member_id)
    encoded_member = utils.to_binary(self._member_id, encoding="utf-8")
    details = {
        u'capabilities': capabilities,
        u'joined_on': datetime.datetime.now(),
        u'member_id': encoded_member,
        # Track whether encoding changed the original member id.
        u'encoded': encoded_member != self._member_id,
    }
    with open(me_path, "wb") as fh:
        fh.write(utils.dumps(details))
    self._joined_groups.add(group_id)
def add_nodes(self, nodes, weight=1):
    """Add nodes to the hashring with equal weight.

    :param nodes: Nodes to add.
    :param weight: How many resource instances this node should manage
                   compared to the other nodes (default 1). Higher
                   weights will be assigned more resources. Three nodes
                   A, B and C with weights 1, 2 and 3 will each handle
                   1/6, 1/3 and 1/2 of the resources, respectively.
    """
    for node in nodes:
        node_key = utils.to_binary(node, 'utf-8')
        digest = hashlib.md5(node_key)
        # Each replica position comes from folding the node key into
        # the running hash one more time.
        replicas = self._partition_number * weight
        for _ in six.moves.range(replicas):
            digest.update(node_key)
            self._ring[self._hash2int(digest)] = node
        self.nodes[node] = weight
    self._partitions = sorted(self._ring.keys())
def _group_path_to_id(self, base_path, group_path):
    """Translates a path into a group name.

    The group name is the last part of the path, obtained by removing
    the base path prefix and any surrounding '/' separators.

    Example:

        group_id = self._group_path_to_id("tooz/groups",
                                          "tooz/groups/helloworld")
        print(group_id)  # b"helloworld"

    :param base_path: common prefix shared by all group paths.
    :param group_path: full path of the group.
    :returns: the group id as bytes.
    """
    if group_path.startswith(base_path):
        group_id = group_path[len(base_path):]
    else:
        group_id = group_path
    # If a group has members (sub-keys) it will contain a trailing '/';
    # strip() removes separators from *both* ends, which also drops the
    # leading '/' left over from the prefix removal. (Groups with no
    # members carry no trailing '/' for some reason.)
    group_id = group_id.strip("/")
    return utils.to_binary(group_id)
def remove_node(self, node):
    """Remove a node from the hashring.

    Raises py:exc:`UnknownNode`

    :param node: Node to remove.
    """
    try:
        node_weight = self.nodes.pop(node)
    except KeyError:
        raise UnknownNode(node)
    node_key = utils.to_binary(node, 'utf-8')
    digest = hashlib.md5(node_key)
    # Recompute the exact replica positions add_nodes() created for
    # this node and delete each one from the ring.
    for _ in six.moves.range(self._partition_number * node_weight):
        digest.update(node_key)
        del self._ring[self._hash2int(digest)]
    self._partitions = sorted(self._ring.keys())
def __init__(self, member_id, parsed_url, options):
    # NOTE(review): in this revision option handling moved to the base
    # class -- self._options is presumably populated by
    # super().__init__(); confirm in the parent driver.
    super(RedisDriver, self).__init__(member_id, parsed_url, options)
    self._parsed_url = parsed_url
    self._encoding = self._options.get('encoding', self.DEFAULT_ENCODING)
    # Socket timeout (seconds) for the redis client.
    timeout = self._options.get('timeout', self.CLIENT_DEFAULT_SOCKET_TO)
    self.timeout = int(timeout)
    # Membership timeout defaults to the raw socket timeout value.
    self.membership_timeout = float(
        self._options.get('membership_timeout', timeout))
    lock_timeout = self._options.get('lock_timeout', self.timeout)
    self.lock_timeout = int(lock_timeout)
    namespace = self._options.get('namespace', self.DEFAULT_NAMESPACE)
    # All redis keys produced by this driver carry this bytes prefix.
    self._namespace = utils.to_binary(namespace, encoding=self._encoding)
    self._group_prefix = self._namespace + b"_group"
    self._beat_prefix = self._namespace + b"_beats"
    self._groups = self._namespace + b"_groups"
    # Client, server info and cached scripts are set up lazily
    # elsewhere (not visible in this chunk).
    self._client = None
    self._acquired_locks = set()
    self._started = False
    self._server_info = {}
    self._scripts = {}
def __init__(self, member_id, parsed_url, options):
    """Set up Redis driver state from the already-parsed options."""
    super(RedisDriver, self).__init__(member_id, parsed_url, options)
    self._parsed_url = parsed_url
    opts = self._options
    self._encoding = opts.get('encoding', self.DEFAULT_ENCODING)
    socket_timeout = opts.get('timeout', self.CLIENT_DEFAULT_SOCKET_TO)
    self.timeout = int(socket_timeout)
    self.membership_timeout = float(
        opts.get('membership_timeout', socket_timeout))
    self.lock_timeout = int(opts.get('lock_timeout', self.timeout))
    ns = utils.to_binary(opts.get('namespace', self.DEFAULT_NAMESPACE),
                         encoding=self._encoding)
    self._namespace = ns
    self._group_prefix = ns + b"_group"
    self._beat_prefix = ns + b"_beats"
    self._groups = ns + b"_groups"
    self._client = None
    self._acquired_locks = set()
    self._started = False
    self._server_info = {}
    self._scripts = {}
def remove_node(self, node):
    """Remove a node from the hashring.

    Raises py:exc:`UnknownNode`

    :param node: Node to remove.
    """
    try:
        node_weight = self.nodes.pop(node)
    except KeyError:
        raise UnknownNode(node)
    node_key = utils.to_binary(node, 'utf-8')
    # md5 keeps its dedicated (non-security) constructor; any other
    # configured algorithm goes through hashlib.new().
    if self._hash_function == 'md5':
        digest = md5(node_key, usedforsecurity=False)
    else:
        digest = hashlib.new(self._hash_function, node_key)
    # Recreate the replica positions add_nodes() used and drop them.
    for _ in range(self._partition_number * node_weight):
        digest.update(node_key)
        del self._ring[self._hash2int(digest)]
    self._partitions = sorted(self._ring.keys())
def _encode_group_leader(self, group_id):
    """Return the key naming the current leader of *group_id*."""
    encoded = utils.to_binary(group_id, encoding=self._encoding)
    return b"leader_of_" + encoded
def _encode_group_id(self, group_id, apply_namespace=True):
    """Encode *group_id* to bytes, optionally joined under the prefix."""
    encoded = utils.to_binary(group_id, encoding=self._encoding)
    if apply_namespace:
        return self.NAMESPACE_SEP.join([self._group_prefix, encoded])
    return encoded
def _encode_member_id(self, member_id):
    """Encode *member_id* to bytes, rejecting reserved private keys."""
    encoded = utils.to_binary(member_id, encoding=self._encoding)
    if encoded == self.GROUP_EXISTS:
        raise ValueError("Not allowed to use private keys as a member id")
    return encoded
def _decode_member_id(self, member_id):
    """Normalize *member_id* to bytes in the driver's encoding."""
    decoded = utils.to_binary(member_id, encoding=self._encoding)
    return decoded
def _encode_member_id(self, member_id):
    """Prefix *member_id* (as bytes) with the member key prefix."""
    encoded = utils.to_binary(member_id)
    return self.MEMBER_PREFIX + encoded
def _encode_group_id(self, group_id):
    """Build the group key: prefix + id + trailing '/' separator."""
    encoded = utils.to_binary(group_id)
    return self.GROUP_PREFIX + encoded + b"/"
def _encode_beat_id(self, member_id):
    """Build the heartbeat key for *member_id* under the beat prefix."""
    encoded = utils.to_binary(member_id, encoding=self._encoding)
    return self.NAMESPACE_SEP.join([self._beat_prefix, encoded])
def _encode_group_member_id(self, group_id, member_id):
    """Build the key for *member_id* inside *group_id*."""
    group_key = self._encode_group_id(group_id)
    return group_key + utils.to_binary(member_id)
def _make_filesystem_safe(cls, item):
    """Hash *item* into a hex digest usable as a filesystem name."""
    data = utils.to_binary(item, encoding="utf8")
    digest = hashlib.new(cls.HASH_ROUTINE, data)
    return digest.hexdigest()
def _encode_group_id(self, group_id):
    """Prefix *group_id* (as bytes) with the group key prefix."""
    encoded = utils.to_binary(group_id)
    return self.GROUP_PREFIX + encoded
def _decode_group_id(self, group_id):
    """Normalize *group_id* to bytes using the configured encoding."""
    decoded = utils.to_binary(group_id, encoding=self._encoding)
    return decoded
def _update_group_metadata(self, path, group_id):
    """Serialize and store metadata for *group_id* at *path*."""
    binary_id = utils.to_binary(group_id, encoding="utf8")
    # 'encoded' records whether encoding changed the caller's id.
    payload = utils.dumps({
        u'group_id': binary_id,
        u'encoded': binary_id != group_id,
    })
    with open(path, "wb") as meta_file:
        meta_file.write(payload)
def _encode_group_leader(self, group_id):
    """Build the key naming the current leader of *group_id*."""
    encoded = utils.to_binary(group_id)
    return self.GROUP_LEADER_PREFIX + encoded