def alive_trusted_hosts(self, for_storage=None, for_restore=None):
    """
    Implements the @abstractmethod from
    C{AbstractTrustedHostsEnabledPeerBookMixin}.

    @precondition: for_storage or for_restore # (for_storage, for_restore)

    @type for_storage: bool, NoneType
    @type for_restore: bool, NoneType

    @rtype: col.Iterable
    """
    with self._rdbw_factory() as rdbw, self.__fdbw_factory() as fdbw:
        trusted = TrustedQueries.HostAtNode.get_all_trusted_hosts(
                      for_storage=for_storage,
                      for_restore=for_restore,
                      rdbw=rdbw)
        # Forcibly evaluated to the set to get the value out of the cursor
        trusted_uuids = frozenset(th.uuid for th in trusted)

        # We've got all the trusted hosts.
        # Now filter only alive ones.
        alive_models = FDBQueries.Users.get_hosts(
                           host_uuids=trusted_uuids,
                           oldest_revive_ts=datetime.utcnow()
                                            - HOST_DEATH_TIME,
                           fdbw=fdbw)

        # Build the full list while fdbw is still open (list instead of
        # a lazy generator), so that fdbw context is released early.
        result = []
        for model in alive_models:
            result.append(Host(uuid=HostUUID.safe_cast_uuid(model.uuid),
                               urls=model.urls))
        return result
def __init__(self,
             uuid, urls,
             last_seen=datetime.min, revive_ts=datetime.min,
             last_msg_sync_ts=datetime.min,
             *args, **kwargs):
    """Constructor.

    @param uuid: the UUID of the Host.
    @type uuid: UUID

    @param urls: the iterable with the urls of the host.
    @type urls: col.Iterable

    @param last_seen: the last time when the host was seen online.
        Is not reset to C{datetime.min} on logout.
    @type last_seen: datetime

    @param revive_ts: the last time when the host was seen revived
        to be considered online. Is reset to C{datetime.min} on logout.
    @type revive_ts: datetime

    @param last_msg_sync_ts: the last time when any message was
        successfully synced to this host.
    @type last_msg_sync_ts: datetime
    """
    super(Host, self).__init__(*args, **kwargs)
    self.uuid = HostUUID.safe_cast_uuid(uuid)
    # Copy to a fresh list so the caller's iterable is not aliased.
    self.urls = list(urls)
    self.last_seen = last_seen
    self.revive_ts = revive_ts
    self.last_msg_sync_ts = last_msg_sync_ts
def peers_revived_within_interval(self, oldest_revive_ts, newest_revive_ts):
    """
    Get all peers which were last revived between C{oldest_revive_ts}
    and C{newest_revive_ts}.
    If any of C{oldest_revive_ts} or C{newest_revive_ts} is C{None},
    the appropriate side of the interval is considered open.

    @note: custom, node-specific method.

    @type oldest_revive_ts: datetime, NoneType
    @type newest_revive_ts: datetime, NoneType

    @return: an iterable over C{model.Host} objects.
    @rtype: col.Iterable
    """
    with self.__fdbw_factory() as fdbw:
        host_models = FDBQueries.Users.get_hosts(
                          oldest_revive_ts=oldest_revive_ts,
                          newest_revive_ts=newest_revive_ts,
                          fdbw=fdbw)

        # list comprehension instead of generator expression,
        # so that fdbw context is released early.
        return [Host(uuid=HostUUID.safe_cast_uuid(host_model.uuid),
                     urls=host_model.urls)
                    for host_model in host_models]
def create_new_host_for_user(self, username):
    """
    Implements the @abstractmethod from
    C{AbstractUsersEnabledPeerBookMixin}.

    @rtype: Host
    """
    # Regenerate the UUID until one is found that doesn't start
    # with the 0x00 byte.
    while True:
        host_uuid = HostUUID.safe_cast_uuid(gen_uuid())
        if host_uuid.bytes[0] != '\x00':
            break

    host = Host(uuid=host_uuid,
                urls=[],
                # Host-specific
                name=str(host_uuid),
                user=self.get_user_by_name(username))

    with self._rdbw_factory() as rdbw, self.__fdbw_factory() as fdbw:
        dw = DataWrapper(rdbw=rdbw, fdbw=fdbw, bdbw=None)
        DataQueries.Inhabitants.add_host(username=username,
                                         hostname=str(host_uuid),
                                         host_uuid=host_uuid,
                                         trusted_host_caps=None,
                                         dw=dw)
    return host
def mark_as_just_seen_alive(self, key, urls):
    """Implements the @abstractmethod from C{AbstractPeerBook}.

    @type key: UUID
    @type urls: col.Iterable
    """
    assert not in_main_thread()

    with self.__fdbw_factory() as fdbw:
        prev_revive_ts = FDBQueries.Users.update_host_info(
                             host_uuid=HostUUID.safe_cast_uuid(key),
                             urls=list(urls),
                             timestamp=datetime.utcnow(),
                             fdbw=fdbw)

        if prev_revive_ts is None:
            # But is this code path supported now?
            logger.debug('Marking %s as alive for the first time', key)
        elif datetime.utcnow() - prev_revive_ts > HOST_DEATH_TIME:
            logger.debug('Marking %s as just seen alive, was dead', key)
        else:
            logger.verbose('Marking %s as just seen alive, was alive', key)
def peers_revived_within_interval(self, oldest_revive_ts, newest_revive_ts):
    """
    Get all peers which were last revived between C{oldest_revive_ts}
    and C{newest_revive_ts}.
    If any of C{oldest_revive_ts} or C{newest_revive_ts} is C{None},
    the appropriate side of the interval is considered open.

    @note: custom, node-specific method.

    @type oldest_revive_ts: datetime, NoneType
    @type newest_revive_ts: datetime, NoneType

    @return: an iterable over C{model.Host} objects.
    @rtype: col.Iterable
    """
    with self.__fdbw_factory() as fdbw:
        host_models = FDBQueries.Users.get_hosts(
                          oldest_revive_ts=oldest_revive_ts,
                          newest_revive_ts=newest_revive_ts,
                          fdbw=fdbw)

        # list comprehension instead of generator expression,
        # so that fdbw context is released early.
        return [Host(uuid=HostUUID.safe_cast_uuid(host_model.uuid),
                     urls=host_model.urls)
                    for host_model in host_models]
def create_new_host_for_user(self, username):
    """
    Implements the @abstractmethod from
    C{AbstractUsersEnabledPeerBookMixin}.

    @rtype: Host
    """
    # Keep generating UUIDs until one not starting with
    # the 0x00 byte is produced.
    good_uuid_found = False
    while not good_uuid_found:
        new_uuid = HostUUID.safe_cast_uuid(gen_uuid())
        good_uuid_found = new_uuid.bytes[0] != '\x00'

    new_host = Host(uuid=new_uuid,
                    urls=[],
                    # Host-specific
                    name=str(new_uuid),
                    user=self.get_user_by_name(username))

    with self._rdbw_factory() as rdbw, self.__fdbw_factory() as fdbw:
        wrapper = DataWrapper(rdbw=rdbw, fdbw=fdbw, bdbw=None)
        DataQueries.Inhabitants.add_host(username=username,
                                         hostname=str(new_uuid),
                                         host_uuid=new_uuid,
                                         trusted_host_caps=None,
                                         dw=wrapper)
    return new_host
def alive_trusted_hosts(self, for_storage=None, for_restore=None):
    """
    Implements the @abstractmethod from
    C{AbstractTrustedHostsEnabledPeerBookMixin}.

    @precondition: for_storage or for_restore # (for_storage, for_restore)

    @type for_storage: bool, NoneType
    @type for_restore: bool, NoneType

    @rtype: col.Iterable
    """
    with self._rdbw_factory() as rdbw, self.__fdbw_factory() as fdbw:
        all_trusted = TrustedQueries.HostAtNode.get_all_trusted_hosts(
                          for_storage=for_storage,
                          for_restore=for_restore,
                          rdbw=rdbw)
        # Forcibly evaluated to the set to get the value out of the cursor
        uuids_of_trusted = frozenset(trusted.uuid
                                         for trusted in all_trusted)

        # We've got all the trusted hosts.
        # Now filter only alive ones.
        death_boundary = datetime.utcnow() - HOST_DEATH_TIME
        models = FDBQueries.Users.get_hosts(host_uuids=uuids_of_trusted,
                                            oldest_revive_ts=death_boundary,
                                            fdbw=fdbw)

        # list comprehension instead of generator expression,
        # so that fdbw context is released early.
        return [Host(uuid=HostUUID.safe_cast_uuid(m.uuid), urls=m.urls)
                    for m in models]
def __getitem__(self, key):
    """
    @type key: UUID

    @rtype: Host
    """
    with self.__fdbw_factory() as fdbw:
        found = FDBQueries.Users.get_host_with_user(
                    host_uuid=HostUUID.safe_cast_uuid(key),
                    fdbw=fdbw)
        # Guard clause: unknown host.
        if found is None:
            return None

        host_model, user_model = found
        user = User(name=user_model.name,
                    digest=user_model.digest)
        assert key == host_model.uuid, \
               (key, user)
        return Host(uuid=host_model.uuid,
                    urls=host_model.urls,
                    # Host-specific
                    user=user)
def from_bson(cls, doc, my_node, body):
    """
    @param my_node: the current Node.
    @type my_node: Node

    @param body: message body
    @type body: basestring
    """
    assert cls.validate_schema(doc), repr(doc)

    node_uuid = my_node.uuid

    # We need to get the proper object (either C{Host} or C{Node}) for
    # the peer by its UUID, to pass to C{MessageFromBigDB} constructor.
    # For now let's assume that if any peer (src or dst)
    # has the same UUID as our Node, the peer is the C{Node};
    # otherwise, it's a C{Host}.
    def _peer_by_uuid(u):
        if u == node_uuid:
            return my_node
        else:
            return HostAtNode(uuid=HostUUID.safe_cast_uuid(u))

    return partial(super(MessageFromBigDB, cls).from_bson(doc),
                   # AbstractMessage
                   src=_peer_by_uuid(doc['src']),
                   dst=_peer_by_uuid(doc['dst']),
                   is_ack=doc['ack'],
                   direct=doc['src'] == node_uuid
                          or doc['dst'] == node_uuid,
                   uuid=MessageUUID.safe_cast_uuid(doc['uuid']),
                   status_code=doc.get('status_code', 0),
                   start_time=doc['uploadDate'],
                   # MessageFromBigDB
                   name=doc['name'],
                   body=body)
def __getitem__(self, key):
    """
    @type key: UUID

    @rtype: Host
    """
    casted_uuid = HostUUID.safe_cast_uuid(key)
    with self.__fdbw_factory() as fdbw:
        pair = FDBQueries.Users.get_host_with_user(host_uuid=casted_uuid,
                                                   fdbw=fdbw)
        if pair is not None:
            host_model, user_model = pair
            owner = User(name=user_model.name,
                         digest=user_model.digest)
            assert key == host_model.uuid, \
                   (key, owner)
            return Host(uuid=host_model.uuid,
                        urls=host_model.urls,
                        # Host-specific
                        user=owner)
        else:
            return None
def _init_from_body(self, body):
    # N2H
    chunks_data = body['chunks']

    # Rebuild the host-to-chunks mapping from the serialized form.
    chunks_map = {}
    for host_uuid, per_host_data in chunks_data.iteritems():
        peer = Host(uuid=HostUUID(host_uuid),
                    urls=per_host_data['urls'])
        chunks_map[peer] = [ChunkInfo.from_json(ch)()
                                for ch in per_host_data['chunks']]
    self.chunks_map = chunks_map
def mark_as_just_logged_out(self, key):
    """Implements the @abstractmethod from C{AbstractPeerBook}.

    @type key: UUID
    """
    with self.__fdbw_factory() as fdbw:
        # The returned previous revive timestamp is not needed here,
        # so it is deliberately ignored.
        FDBQueries.Users.update_host_info(
            host_uuid=HostUUID.safe_cast_uuid(key),
            urls=[],
            timestamp=None,  # "unsee" me!
            fdbw=fdbw)
def from_bson(cls, doc):
    assert cls.validate_schema(doc), repr(doc)

    # TODO: change to doc['revive_ts'] when all records
    # are converted.
    revive_ts = doc.get('revive_ts', datetime.min)

    return partial(super(Host, cls).from_bson(doc),
                   uuid=HostUUID.safe_cast_uuid(doc['uuid']),
                   urls=doc['urls'],
                   last_seen=doc['last_seen'],
                   revive_ts=revive_ts,
                   last_msg_sync_ts=doc['last_msg_sync_ts'])
def from_bson(cls, doc):
    assert cls.validate_schema(doc), repr(doc)

    init_kwargs = dict(uuid=HostUUID.safe_cast_uuid(doc['uuid']),
                       urls=doc['urls'],
                       last_seen=doc['last_seen'],
                       # TODO: change to doc['revive_ts'] when all
                       # records are converted.
                       revive_ts=doc.get('revive_ts', datetime.min),
                       last_msg_sync_ts=doc['last_msg_sync_ts'])

    return partial(super(Host, cls).from_bson(doc), **init_kwargs)
def is_peer_alive(self, key):
    """Implements the @abstractmethod from C{AbstractPeerBook}.

    @type key: UUID
    @rtype: bool
    """
    host_uuid = HostUUID.safe_cast_uuid(key)
    # A host revived earlier than this boundary is considered dead.
    death_boundary = datetime.utcnow() - HOST_DEATH_TIME
    with self.__fdbw_factory() as fdbw:
        return FDBQueries.Users.is_peer_alive(
                   host_uuid=host_uuid,
                   oldest_revive_ts=death_boundary,
                   fdbw=fdbw)
def alive_peers(self, filter_uuid=None):
    """Implements the @abstractmethod from C{AbstractPeerBook}."""
    death_boundary = datetime.utcnow() - HOST_DEATH_TIME
    with self.__fdbw_factory() as fdbw:
        models = FDBQueries.Users.get_hosts(oldest_revive_ts=death_boundary,
                                            fdbw=fdbw)

        # Materialize the result while fdbw is still open,
        # so that fdbw context is released early afterwards.
        peers = []
        for model in models:
            if filter_uuid is not None and model.uuid == filter_uuid:
                continue
            peers.append(Host(uuid=HostUUID.safe_cast_uuid(model.uuid),
                              urls=model.urls))
        return peers
def alive_peers(self, filter_uuid=None):
    """Implements the @abstractmethod from C{AbstractPeerBook}."""
    with self.__fdbw_factory() as fdbw:
        models = FDBQueries.Users.get_hosts(
                     oldest_revive_ts=datetime.utcnow() - HOST_DEATH_TIME,
                     fdbw=fdbw)

        # list comprehension instead of generator expression,
        # so that fdbw context is released early.
        return [Host(uuid=HostUUID.safe_cast_uuid(m.uuid), urls=m.urls)
                    for m in models
                    if filter_uuid is None or m.uuid != filter_uuid]
def get_user_hosts(self, username):
    """
    Implements the @abstractmethod from
    C{AbstractUsersEnabledPeerBookMixin}.

    @rtype: col.Iterable
    @result: a (possibly, non-reiterable) iterable of C{Host} objects.
    """
    with self.__fdbw_factory() as fdbw:
        models = FDBQueries.Users.get_hosts(username=username,
                                            fdbw=fdbw)

        # Materialize the list while fdbw is still open,
        # so that fdbw context is released early afterwards.
        user_hosts = []
        for model in models:
            user_hosts.append(Host(uuid=HostUUID.safe_cast_uuid(model.uuid),
                                   urls=model.urls))
        return user_hosts
def peers(self):
    """Implements the @abstractmethod from C{AbstractPeerBook}.

    At the moment, C{PeerBookInFastDB.peers} is equal to
    C{PeerBookInFastDB.hosts}.

    @returns: the (possibly non-reiterable) iterable of the Host objects
        available to the system.

    @note: the 'name' and 'user' fields are missing!
    """
    with self.__fdbw_factory() as fdbw:
        models = FDBQueries.Users.get_hosts(fdbw=fdbw)

        # Materialize the list while fdbw is still open,
        # so that fdbw context is released early afterwards.
        all_hosts = []
        for model in models:
            all_hosts.append(Host(uuid=HostUUID.safe_cast_uuid(model.uuid),
                                  urls=model.urls))
        return all_hosts
def get_user_hosts(self, username):
    """
    Implements the @abstractmethod from
    C{AbstractUsersEnabledPeerBookMixin}.

    @rtype: col.Iterable
    @result: a (possibly, non-reiterable) iterable of C{Host} objects.
    """
    with self.__fdbw_factory() as fdbw:
        models = FDBQueries.Users.get_hosts(username=username, fdbw=fdbw)

        # list comprehension instead of generator expression,
        # so that fdbw context is released early.
        return [Host(uuid=HostUUID.safe_cast_uuid(m.uuid), urls=m.urls)
                    for m in models]
def peers(self):
    """Implements the @abstractmethod from C{AbstractPeerBook}.

    At the moment, C{PeerBookInFastDB.peers} is equal to
    C{PeerBookInFastDB.hosts}.

    @returns: the (possibly non-reiterable) iterable of the Host objects
        available to the system.

    @note: the 'name' and 'user' fields are missing!
    """
    with self.__fdbw_factory() as fdbw:
        models = FDBQueries.Users.get_hosts(fdbw=fdbw)

        # list comprehension instead of generator expression,
        # so that fdbw context is released early.
        return [Host(uuid=HostUUID.safe_cast_uuid(m.uuid), urls=m.urls)
                    for m in models]
def _expect_map_decode(tr_dict):
    r"""
    Decode the dictionary mapping the Host UUID to the chunk list
    from the transportation form to the original one.

    >>> ReceiveChunksMessage._expect_map_decode({
    ...     '233ad9c2268f4506ab0f4c71461c5d88':
    ...         [{'crc32': 710928501,
    ...           'maxsize_code': 1,
    ...           'hash': 'YWJjZGFiY2RhYmNkYWJjZGFiY2RhYmNk'
    ...                   'YWJjZGFiY2RhYmNkYWJjZGFiY2RhYmNk'
    ...                   'YWJjZGFiY2RhYmNkYWJjZA==',
    ...           'uuid': '0a7064b3bef645c09e82e9f9a40dfcf3',
    ...           'size': 73819}],
    ...     'e96a073b3cd049a6b14a1fb04c221a9c':
    ...         [{'crc32': 134052443,
    ...           'maxsize_code': 1,
    ...           'hash': 'YWJjZGVmZ2hhYmNkZWZnaGFiY2RlZmdo'
    ...                   'YWJjZGVmZ2hhYmNkZWZnaGFiY2RlZmdo'
    ...                   'YWJjZGVmZ2hhYmNkZWZnaA==',
    ...           'uuid': '5b237ceb300d4c88b4c06331cb14b5b4',
    ...           'size': 2097152},
    ...          {'crc32': 2120017837,
    ...           'maxsize_code': 0,
    ...           'hash': 'MDEyMzQ1NjcwMTIzNDU2NzAxMjM0NTY3'
    ...                   'MDEyMzQ1NjcwMTIzNDU2NzAxMjM0NTY3'
    ...                   'MDEyMzQ1NjcwMTIzNDU2Nw==',
    ...           'uuid': '940f071152d742fbbf4c818580f432dc',
    ...           'size': 143941},
    ...          {'crc32': 3704113112,
    ...           'maxsize_code': 1,
    ...           'hash': 'NzY1NDMyMTA3NjU0MzIxMDc2NTQzMjEw'
    ...                   'NzY1NDMyMTA3NjU0MzIxMDc2NTQzMjEw'
    ...                   'NzY1NDMyMTA3NjU0MzIxMA==',
    ...           'uuid': 'a5b605f26ea549f38658d217b7e8e784',
    ...           'size': 2097151}]
    ... }) # doctest:+ELLIPSIS,+NORMALIZE_WHITESPACE
    {HostUUID('e96a073b-3cd0-49a6-b14a-1fb04c221a9c'):
         [ChunkInfo(uuid=ChunkUUID('5b237ceb-300d-4c88-b4c0-6331cb14b5b4'),
                    maxsize_code=1,
                    hash=unhexlify('6162636465666...67686162636465666768'),
                    size=2097152, crc32=0x07FD7A5B),
          ChunkInfo(uuid=ChunkUUID('940f0711-52d7-42fb-bf4c-818580f432dc'),
                    maxsize_code=0,
                    hash=unhexlify('3031323334353...36373031323334353637'),
                    size=143941, crc32=0x7E5CE7AD),
          ChunkInfo(uuid=ChunkUUID('a5b605f2-6ea5-49f3-8658-d217b7e8e784'),
                    maxsize_code=1,
                    hash=unhexlify('3736353433323...31303736353433323130'),
                    size=2097151, crc32=0xDCC847D8)],
     HostUUID('233ad9c2-268f-4506-ab0f-4c71461c5d88'):
         [ChunkInfo(uuid=ChunkUUID('0a7064b3-bef6-45c0-9e82-e9f9a40dfcf3'),
                    maxsize_code=1,
                    hash=unhexlify('6162636461626...63646162636461626364'),
                    size=73819, crc32=0x2A5FE875)]}
    """
    # For each inhabitant: parse its UUID string and deserialize
    # every chunk record via C{ChunkInfo.from_json}.
    return {HostUUID(inh_uuid_str): [ChunkInfo.from_json(c)()
                                         for c in per_inh]
                for inh_uuid_str, per_inh in tr_dict.iteritems()}