Example #1
    def alive_trusted_hosts(self, for_storage=None, for_restore=None):
        """
        Implements the @abstractmethod from
        C{AbstractTrustedHostsEnabledPeerBookMixin}.

        @precondition: for_storage or for_restore # (for_storage, for_restore)

        @type for_storage: bool, NoneType
        @type for_restore: bool, NoneType

        @rtype: col.Iterable
        """
        with self._rdbw_factory() as rdbw, self.__fdbw_factory() as fdbw:
            thosts = list(
                TrustedQueries.HostAtNode.get_all_trusted_hosts(
                    for_storage=for_storage,
                    for_restore=for_restore,
                    rdbw=rdbw))

            # Forcibly evaluated into a set, to pull the values out of the cursor
            thost_uuids = frozenset(th.uuid for th in thosts)

            # We've got all the trusted hosts.
            # Now filter only alive ones.
            host_models = FDBQueries.Users.get_hosts(
                host_uuids=thost_uuids,
                oldest_revive_ts=datetime.utcnow() - HOST_DEATH_TIME,
                fdbw=fdbw)

            # list comprehension instead of generator expression,
            # so that fdbw context is released early.
            return [
                Host(uuid=HostUUID.safe_cast_uuid(host_model.uuid),
                     urls=host_model.urls) for host_model in host_models
            ]
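
A note on the "list comprehension instead of generator expression, so that fdbw context is released early" comment, which recurs throughout these examples: a generator returned from inside the `with` block would only be consumed after the block exits, i.e. after the database wrapper has been closed. A minimal standalone sketch of the difference (Python 2, like the examples; fake_dbw is a made-up stand-in for the fdbw/rdbw wrappers, not redfs code):

    from contextlib import contextmanager

    @contextmanager
    def fake_dbw():
        """Stand-in for the DB wrappers: tracks open/closed state."""
        state = {'open': True}
        try:
            yield state
        finally:
            state['open'] = False

    def rows_lazy():
        with fake_dbw() as dbw:
            # Generator: evaluated only when iterated, after the close.
            return (i for i in xrange(3) if dbw['open'])

    def rows_eager():
        with fake_dbw() as dbw:
            # List comprehension: evaluated now, while dbw is still open.
            return [i for i in xrange(3) if dbw['open']]

    print list(rows_lazy())   # [] -- the wrapper was already closed
    print rows_eager()        # [0, 1, 2]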
Example #2
    def __getitem__(self, key):
        """
        @type key: UUID

        @rtype: Host, NoneType
        """
        with self.__fdbw_factory() as fdbw:
            host_with_user = FDBQueries.Users.get_host_with_user(
                host_uuid=HostUUID.safe_cast_uuid(key), fdbw=fdbw)

        if host_with_user is None:
            return None
        else:
            host_model, user_model = host_with_user

            user = User(name=user_model.name, digest=user_model.digest)

            assert key == host_model.uuid, \
                   (key, user)

            host = Host(
                uuid=host_model.uuid,
                urls=host_model.urls,
                # Host-specific
                user=user)

            return host
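
Note the lookup contract here: unlike a standard mapping, this __getitem__ returns None for an unknown UUID instead of raising KeyError, so callers must check for None explicitly. An illustrative standalone equivalent (NoneOnMissing is hypothetical, not a redfs class):

    class NoneOnMissing(dict):
        """Dict variant mimicking the PeerBook lookup contract above."""
        def __getitem__(self, key):
            return dict.get(self, key)  # None instead of KeyError

    book = NoneOnMissing()
    assert book['no-such-uuid'] is None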
Example #3
    def peers_revived_within_interval(self, oldest_revive_ts,
                                      newest_revive_ts):
        """
        Get all peers which were last revived between C{oldest_revive_ts}
        and C{newest_revive_ts}.

        If either C{oldest_revive_ts} or C{newest_revive_ts} is C{None},
        the corresponding side of the interval is considered open.

        @note: custom, node-specific method.

        @type oldest_revive_ts: datetime, NoneType
        @type newest_revive_ts: datetime, NoneType

        @return: an iterable over C{model.Host} objects.
        @rtype: col.Iterable
        """
        with self.__fdbw_factory() as fdbw:
            host_models = FDBQueries.Users.get_hosts(
                oldest_revive_ts=oldest_revive_ts,
                newest_revive_ts=newest_revive_ts,
                fdbw=fdbw)

            # list comprehension instead of generator expression,
            # so that fdbw context is released early.
            return [
                Host(uuid=HostUUID.safe_cast_uuid(host_model.uuid),
                     urls=host_model.urls) for host_model in host_models
            ]
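
The open-interval convention from the docstring, written out as a plain predicate (illustrative only; the actual filtering happens inside FDBQueries.Users.get_hosts):

    from datetime import datetime

    def revived_within(ts, oldest_revive_ts, newest_revive_ts):
        """A None bound leaves that side of the interval open."""
        return ((oldest_revive_ts is None or oldest_revive_ts <= ts) and
                (newest_revive_ts is None or ts <= newest_revive_ts))

    assert revived_within(datetime(2012, 9, 26), datetime(2012, 1, 1), None)
    assert not revived_within(datetime(2011, 12, 31),
                              datetime(2012, 1, 1), None)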
Example #4
    def create_new_host_for_user(self, username):
        """
        Implements the @abstractmethod from
        C{AbstractUsersEnabledPeerBookMixin}.

        @rtype: Host
        """
        uuid_created = False

        while not uuid_created:
            host_uuid = HostUUID.safe_cast_uuid(gen_uuid())
            # Ensure it doesn't start with a 0x00 byte
            uuid_created = (host_uuid.bytes[0] != '\x00')

        host = Host(
            uuid=host_uuid,
            urls=[],
            # Host-specific
            name=str(host_uuid),
            user=self.get_user_by_name(username))

        with self._rdbw_factory() as rdbw, self.__fdbw_factory() as fdbw:
            dw = DataWrapper(rdbw=rdbw, fdbw=fdbw, bdbw=None)
            DataQueries.Inhabitants.add_host(username=username,
                                             hostname=str(host_uuid),
                                             host_uuid=host_uuid,
                                             trusted_host_caps=None,
                                             dw=dw)

        return host
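
For reference, the UUID retry loop in isolation, written without the flag variable (Python 2; uuid.uuid4 is used here as a stand-in for the project's gen_uuid, which is an assumption):

    from uuid import uuid4 as gen_uuid

    host_uuid = gen_uuid()
    # Python 2: UUID.bytes is a str, so bytes[0] is a one-character string.
    while host_uuid.bytes[0] == '\x00':
        host_uuid = gen_uuid()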
Example #5
    def _init_from_body(self, body):
        # N2H
        chunks_data = body['chunks']

        self.chunks_map = \
            {Host(uuid=HostUUID(host_uuid), urls=per_host_data['urls']):
                     [ChunkInfo.from_json(ch)()
                          for ch in per_host_data['chunks']]
                 for host_uuid, per_host_data
                     in chunks_data.iteritems()}
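
The expected shape of body['chunks'] can be inferred from the comprehension above and from the doctests in examples #10 and #11: a mapping from hex host-UUID strings to per-host dicts. An illustrative body (values made up; in the JSON form the hash is base64, per example #11, and shortened here):

    example_body = {
        'chunks': {
            '233ad9c2268f4506ab0f4c71461c5d88': {
                'urls': ['https://192.168.1.2:1234'],
                'chunks': [
                    {'crc32': 710928501,
                     'maxsize_code': 1,
                     'hash': 'YWJjZGFiY2RhYmNkYWJjZA==',  # base64, shortened
                     'uuid': '0a7064b3bef645c09e82e9f9a40dfcf3',
                     'size': 73819},
                ],
            },
        },
    }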
Example #6
    def alive_peers(self, filter_uuid=None):
        """Implements the @abstractmethod from C{AbstractPeerBook}.
        """
        with self.__fdbw_factory() as fdbw:
            host_models = FDBQueries.Users.get_hosts(
                oldest_revive_ts=datetime.utcnow() - HOST_DEATH_TIME,
                fdbw=fdbw)

            # list comprehension instead of generator expression,
            # so that fdbw context is released early.
            return [
                Host(uuid=HostUUID.safe_cast_uuid(host_model.uuid),
                     urls=host_model.urls) for host_model in host_models
                if (filter_uuid is None or host_model.uuid != filter_uuid)
            ]
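
The filter_uuid clause keeps every host when no filter is given, and drops exactly the matching host otherwise; as a standalone predicate:

    def keep(host_uuid, filter_uuid):
        return filter_uuid is None or host_uuid != filter_uuid

    assert keep('aa', None)
    assert keep('aa', 'bb')
    assert not keep('aa', 'aa')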
Example #7
    def get_user_hosts(self, username):
        """
        Implements the @abstractmethod from
        C{AbstractUsersEnabledPeerBookMixin}.

        @rtype: col.Iterable
        @return: a (possibly non-reiterable) iterable of C{Host} objects.
        """
        with self.__fdbw_factory() as fdbw:
            host_models = FDBQueries.Users.get_hosts(username=username,
                                                     fdbw=fdbw)

            # list comprehension instead of generator expression,
            # so that fdbw context is released early.
            return [
                Host(uuid=HostUUID.safe_cast_uuid(host_model.uuid),
                     urls=host_model.urls) for host_model in host_models
            ]
Example #8
    def peers(self):
        """Implements the @abstractmethod from C{AbstractPeerBook}.

        At the moment, C{PeerBookInFastDB.peers} is equal to
        C{PeerBookInFastDB.hosts}.

        @returns: the (possibly non-reiterable) iterable of the Host objects
            available to the system.

        @note: the 'name' and 'user' fields are missing!
        """
        with self.__fdbw_factory() as fdbw:
            host_models = FDBQueries.Users.get_hosts(fdbw=fdbw)
            # list comprehension instead of generator expression,
            # so that fdbw context is released early.
            return [
                Host(uuid=HostUUID.safe_cast_uuid(host_model.uuid),
                     urls=host_model.urls) for host_model in host_models
            ]
Example #9
    def on_begin(self):
        """
        @todo: If we ever change C{app.known_peers} to use the C{PeerBook}
        class, then that class, rather than the caller, must control
        the DB storage.
        """
        cls = self.__class__

        assert self.is_incoming(), repr(self)

        _known_peers = self.manager.app.known_peers
        _message = self.message

        # Add the expected hosts to the list of known peers.
        add_peers_to_db = []
        for exp_peer_uuid in chain(_message.expect_replication.iterkeys(),
                                   _message.expect_restore.iterkeys()):
            assert isinstance(exp_peer_uuid, PeerUUID), \
                   exp_peer_uuid

            if exp_peer_uuid not in _known_peers:
                # Need to add some peer, unknown before.
                peer_to_add = Host(uuid=exp_peer_uuid)
                _known_peers[exp_peer_uuid] = peer_to_add
                # Add to the DB as well, but later
                add_peers_to_db.append(peer_to_add)

        expect_host_chunk_pairs = \
            chain(cls._expect_mapping_as_list(_message.expect_replication,
                                              is_restore=False),
                  cls._expect_mapping_as_list(_message.expect_restore,
                                              is_restore=True))

        # Do we need to actually update the database? Do that, if yes.
        with db.RDB() as rdbw:
            if add_peers_to_db:
                logger.debug('Adding peers %r', add_peers_to_db)
                Queries.Inhabitants.set_peers(add_peers_to_db, rdbw)

            HostQueries.HostChunks.expect_chunks(expect_host_chunk_pairs, rdbw)
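
The first loop is a register-once pattern: each expected peer UUID is looked up in known_peers, and only previously unknown peers are queued for the DB write. Stripped of the redfs types (all names below are illustrative stand-ins):

    from itertools import chain

    known_peers = {'u1': 'host-u1'}            # uuid -> Host stand-in
    expect_replication = {'u1': [], 'u2': []}
    expect_restore = {'u3': []}

    add_peers_to_db = []
    for exp_peer_uuid in chain(expect_replication, expect_restore):
        if exp_peer_uuid not in known_peers:
            peer_to_add = 'host-%s' % exp_peer_uuid
            known_peers[exp_peer_uuid] = peer_to_add
            add_peers_to_db.append(peer_to_add)

    assert add_peers_to_db == ['host-u2', 'host-u3']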
Example #10
File: send_chunks.py (project: shvar/redfs)
    def from_bson(cls, doc):
        r"""
        >>> from datetime import datetime
        >>> from uuid import UUID

        >>> from bson.binary import Binary

        >>> tr_start_time = datetime(2012, 9, 26, 14, 29, 48, 877434)
        >>> tr_uuid = UUID('1a82a181-741d-4a64-86e5-77a7dd000ba2')
        >>> tr_src_uuid = UUID('fa87ebfd-d498-4ba6-9f04-a933e4512b24')
        >>> tr_dst_uuid = UUID('e6aa4157-ee8a-449e-a2d5-3340a59e717d')

        >>> SendChunksTransactionState_Node.from_bson({
        ...     'chunks_map': {
        ...         '233ad9c2268f4506ab0f4c71461c5d88':
        ...             {'chunks':
        ...                  [{'crc32': 710928501, 'maxsize_code': 1,
        ...                    'hash': Binary('abcdabcd' * 8, 0),
        ...                    'uuid':
        ...                       UUID('0a7064b3-bef6-45c0-9e82-e9f9a40dfcf3'),
        ...                    'size': 73819}],
        ...              'urls': ['https://192.168.1.2:1234',
        ...                       'https://127.0.0.1:1234']},
        ...         'e96a073b3cd049a6b14a1fb04c221a9c':
        ...             {'chunks':
        ...                  [{'crc32': 134052443, 'maxsize_code': 1,
        ...                    'hash': Binary('abcdefgh' * 8, 0),
        ...                    'uuid':
        ...                       UUID('5b237ceb-300d-4c88-b4c0-6331cb14b5b4'),
        ...                    'size': 2097152},
        ...                   {'crc32': 2120017837, 'maxsize_code': 0,
        ...                    'hash': Binary('01234567' * 8, 0),
        ...                    'uuid':
        ...                       UUID('940f0711-52d7-42fb-bf4c-818580f432dc'),
        ...                    'size': 143941},
        ...                   {'crc32': 3704113112, 'maxsize_code': 1,
        ...                    'hash': Binary('76543210' * 8, 0),
        ...                    'uuid':
        ...                       UUID('a5b605f2-6ea5-49f3-8658-d217b7e8e784'),
        ...                    'size': 2097151}],
        ...              'urls': ['https://192.168.2.3:4242']}
        ...     }
        ... })(tr_start_time=tr_start_time,
        ...    tr_uuid=tr_uuid,
        ...    tr_src_uuid=tr_src_uuid,
        ...    tr_dst_uuid=tr_dst_uuid
        ... )  # doctest:+ELLIPSIS,+NORMALIZE_WHITESPACE
        SendCh...State_Node(chunks_map={Host(uuid=PeerUUID('e96a073b...1a9c'),
                                             urls=['https://192...3:4242']):
                [models.t...ChunkInfo(uuid=ChunkUUID('5b237ceb-...31cb14b5b4'),
                                      maxsize_code=1,
                                      hash=unhexlify('616263646...6465666768'),
                                      size=2097152, crc32=0x07FD7A5B),
                 models.t...ChunkInfo(uuid=ChunkUUID('940f0711-...8580f432dc'),
                                      maxsize_code=0,
                                      hash=unhexlify('303132333...3334353637'),
                                      size=143941, crc32=0x7E5CE7AD),
                 models.t...ChunkInfo(uuid=ChunkUUID('a5b605f2-...17b7e8e784'),
                                      maxsize_code=1,
                                      hash=unhexlify('373635343...3433323130'),
                                      size=2097151, crc32=0xDCC847D8)],
            Host(uuid=PeerUUID('233ad9c2-268f-4506-ab0f-4c71461c5d88'),
                 urls=['https://192.168.1.2:1234', 'https://127.0.0.1:1234']):
                [models.t...ChunkInfo(uuid=ChunkUUID('0a7064b3-...f9a40dfcf3'),
                                      maxsize_code=1,
                                      hash=unhexlify('616263646...6461626364'),
                                      size=73819, crc32=0x2A5FE875)]})
        """
        assert cls.validate_schema(doc), repr(doc)

        chunks_map_preprocessed = \
            {Host(uuid=PeerUUID(host_uuid_str),
                  urls=per_chunk_data['urls']):
                 [model_ChunkInfo.from_bson(doc)()
                      for doc in per_chunk_data['chunks']]
                 for host_uuid_str, per_chunk_data
                     in doc['chunks_map'].iteritems()}

        return partial(
            super(SendChunksTransactionState_Node, cls).from_bson(doc),
            # Mandatory
            chunks_map=chunks_map_preprocessed)
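
Note that from_bson returns a partial rather than a finished state object: the body-derived field (chunks_map) is bound immediately, and the caller later supplies the transaction-level fields (tr_start_time, tr_uuid, ...), exactly as the doctest does. The pattern in miniature (make_state is hypothetical):

    from functools import partial

    def make_state(tr_start_time, tr_uuid, chunks_map):
        return {'tr_start_time': tr_start_time,
                'tr_uuid': tr_uuid,
                'chunks_map': chunks_map}

    # Bind what the BSON body provides now; the rest is supplied later.
    pending = partial(make_state, chunks_map={})
    state = pending(tr_start_time='2012-09-26T14:29:48',
                    tr_uuid='1a82a181-741d-4a64-86e5-77a7dd000ba2')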
Example #11
    def _init_from_body(self, body):
        r"""
        >>> from common.typed_uuids import ChunkUUID, MessageUUID, PeerUUID

        >>> host = Host(uuid=UUID('00000000-7606-420c-8a98-a6c8728ac98d'))
        >>> node = Node(uuid=UUID('11111111-79b4-49e0-b72e-8c4ce6a2aed9'))

        >>> msg0 = ProgressMessage(
        ...     src=node,
        ...     dst=host,
        ...     uuid=UUID('4ac2536a-4a1e-4b08-ad4e-11bd675fdf15')
        ... )
        >>> ProgressMessage._init_from_body \
        ...                ._without_bunzip2._without_json_loads(msg0, {})

        >>> msg1 = ProgressMessage(
        ...     src=node,
        ...     dst=host,
        ...     uuid=UUID('baebd0f2-fc58-417b-97ee-08a892cd5a8f'),
        ...     status_code=1
        ... )
        >>> # TODO: add other fields, see _get_body() doctest for that.
        >>> data1 = {
        ...     'chunks': [
        ...         {'crc32': 2120017837,
        ...          'maxsize_code': 0,
        ...          'hash': 'MDEyMzQ1NjcwMTIzNDU2NzAxMjM0NTY3'
        ...                  'MDEyMzQ1NjcwMTIzNDU2NzAxMjM0NTY3'
        ...                  'MDEyMzQ1NjcwMTIzNDU2Nw==',
        ...          'uuid': '940f071152d742fbbf4c818580f432dc',
        ...          'size': 143941},
        ...         {'crc32': 3704113112,
        ...          'maxsize_code': 1,
        ...          'hash': 'NzY1NDMyMTA3NjU0MzIxMDc2NTQzMjEw'
        ...                  'NzY1NDMyMTA3NjU0MzIxMDc2NTQzMjEw'
        ...                  'NzY1NDMyMTA3NjU0MzIxMA==',
        ...          'uuid': 'a5b605f26ea549f38658d217b7e8e784',
        ...          'size': 2097152},
        ...         {'crc32': 134052443,
        ...          'maxsize_code': 1,
        ...          'hash': 'YWJjZGVmZ2hhYmNkZWZnaGFiY2RlZmdo'
        ...                  'YWJjZGVmZ2hhYmNkZWZnaGFiY2RlZmdo'
        ...                  'YWJjZGVmZ2hhYmNkZWZnaA==',
        ...          'uuid': '5b237ceb300d4c88b4c06331cb14b5b4',
        ...          'size': 2097152}]
        ... }

        >>> ProgressMessage._init_from_body \
        ...                ._without_bunzip2._without_json_loads(msg1, data1)
        >>> msg1  # doctest:+ELLIPSIS,+NORMALIZE_WHITESPACE
        ProgressMessage(src=Node(uuid=PeerUUID('11111111-...-8c4ce6a2aed9')),
            dst=Host(uuid=PeerUUID('00000000-7606-420c-8a98-a6c8728ac98d')),
            uuid=MessageUUID('baebd0f2-fc58-417b-97ee-08a892cd5a8f'),
            status_code=1
            [
                chunks_by_uuid={ChunkUUID('940f0711-52d7-...-818580f432dc'):
                        ChunkInfo(uuid=ChunkUUID('940f0711-...-818580f432dc'),
                            maxsize_code=0,
                            hash=unhexlify('30313233343...373031323334353637'),
                            size=143941, crc32=0x7E5CE7AD),
                    ChunkUUID('a5b605f2-6ea5-...-d217b7e8e784'):
                        ChunkInfo(uuid=ChunkUUID('a5b605f2-...-d217b7e8e784'),
                            maxsize_code=1,
                            hash=unhexlify('37363534333...303736353433323130'),
                            size=2097152, crc32=0xDCC847D8),
                    ChunkUUID('5b237ceb-300d-4c88-b4c0-6331cb14b5b4'):
                        ChunkInfo(uuid=ChunkUUID('5b237ceb-...-6331cb14b5b4'),
                            maxsize_code=1,
                            hash=unhexlify('61626364656...686162636465666768'),
                            size=2097152, crc32=0x07FD7A5B)}
            ])
        """
        # H2N

        # Parse dataset
        if 'dataset' in body:
            self.completion = False
            self.dataset = DatasetOnChunks.from_json(body['dataset'])()
        elif 'dataset_completed' in body:
            self.completion = True
            self.dataset = \
                DatasetOnChunks.from_json(body['dataset_completed'])()

        # Parse chunks_by_uuid
        chunks_unp = body.get('chunks', None)
        if chunks_unp is not None:
            _chunks = (ChunkInfo.from_json(ch)() for ch in chunks_unp)
            self.chunks_by_uuid = {ch.uuid: ch for ch in _chunks}

        # Parse host_chunks_map
        host_chunks_map_unp = body.get('host_chunks_map', None)
        if host_chunks_map_unp is not None:
            host_chunks_map = \
                {Host(uuid=UUID(uuid)):
                         [ProgressNotificationPerHost.from_json(pr)()
                              for pr in per_host_notifs]
                     for uuid, per_host_notifs
                         in host_chunks_map_unp.iteritems()}
            self.chunks_map_getter = lambda: host_chunks_map

        # Parse blocks_map
        blocks_map_unp = body.get('blocks_map', None)
        if blocks_map_unp is not None:
            self.blocks_map = [
                Chunk.Block.from_json(bl)() for bl in blocks_map_unp
            ]
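
Unlike the BSON form in example #10 (raw Binary hashes), the JSON bodies carry chunk hashes base64-encoded. A quick standalone check that the first hash in data1 decodes to the 64 bytes shown in the expected output:

    from base64 import b64decode

    h = b64decode('MDEyMzQ1NjcwMTIzNDU2NzAxMjM0NTY3'
                  'MDEyMzQ1NjcwMTIzNDU2NzAxMjM0NTY3'
                  'MDEyMzQ1NjcwMTIzNDU2Nw==')
    assert h == '01234567' * 8  # i.e. the unhexlify('30313233...') above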
Example #12
    def _on_child_pbh_completed(self, pbh_state):
        """
        This method is called after the child PROVIDE_BACKUP_HOSTS transaction
        has succeeded.

        @type pbh_state: ProvideBackupHostsTransaction_Host.State
        """
        if not ProvideBackupHostsMessage.ResultCodes \
                                        .is_good(pbh_state.ack_result_code):
            # Our backup request was rejected!
            self.ack_result_code = \
                BackupMessage.ResultCodes.from_provide_backup_host_result_code(
                    pbh_state.ack_result_code)
            self.__complete_backup_transaction()

        else:
            # Proceed with the backup
            self.target_hosts = \
                _target_hosts = \
                    {Host(uuid=uuid,
                          urls=per_host.urls):
                             col.Counter(per_host.chunks_by_size)
                         for uuid, per_host in pbh_state.ack_hosts_to_use
                                                        .iteritems()}

            self.manager.app.known_peers.update(
                {host.uuid: host
                     for host in _target_hosts.iterkeys()})

            logger.debug('CHILD PBH COMPLETED (%r), using target hosts '
                             'for backup: %r',
                         pbh_state,
                         _target_hosts.keys())

            _message = self.message


            @exceptions_logged(logger)
            @contract_epydoc
            def _on_dataset_progress_success(p_state):
                """
                This method is called after the PROGRESS transaction
                reporting to the node about the dataset has succeeded.

                @type p_state: ProgressTransaction_Host.State
                """
                logger.info('Reported to the Node about the dataset %r '
                                'successfully.',
                            self.dataset)
                self.__notify_about_upload_progress()
                self.__upload_more_chunks()


            # Notify the node about the new dataset which started uploading.
            p1_tr = self.manager.create_new_transaction(name='PROGRESS',
                                                        src=_message.dst,
                                                        dst=_message.src,
                                                        parent=self,
                                                        # PROGRESS-specific
                                                        dataset=self.dataset)
            p1_tr.completed.addCallbacks(
                _on_dataset_progress_success,
                partial(logger.error, 'Dataset reporting issue: %r'))
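
p1_tr.completed appears to be a Twisted Deferred, judging by the addCallbacks usage (an assumption; the transaction classes themselves are not shown here). The success/error wiring in isolation:

    import logging
    from functools import partial
    from twisted.internet.defer import Deferred

    logger = logging.getLogger(__name__)

    d = Deferred()
    d.addCallbacks(lambda p_state: logger.info('Reported: %r', p_state),
                   partial(logger.error, 'Dataset reporting issue: %r'))
    d.callback('p_state placeholder')  # fires only the success path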