Example 1
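A `stat` method that logs its arguments and returns the status rows for a key, using `current_status_of_key` when no `version_id` is given and `current_status_of_version` otherwise.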
    def stat(self, collection_id, key, version_id=None):
        self._log.debug("collection_id=%s, key=%s, version_id=%r" % (
            collection_id, key, version_id
        ))
        
        if version_id is None:
            status_rows = current_status_of_key(
                self._node_local_connection,
                collection_id,
                key,
            )
        else:
            status_rows = current_status_of_version(
                self._node_local_connection,
                version_id
            )

        return status_rows
Example 2
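A module-level `retrieve_meta` function that resolves status rows the same way, returns `None` unless the first row's segment status is final, and otherwise fetches the segment's metadata rows as a dict.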
def retrieve_meta(connection, collection_id, key, version_id=None):
    """
    get a dict of meta data associated with the segment
    """
    # TODO: find a non-blocking way to do this
    # TODO: don't just use the local node, it might be wrong

    if version_id is None:
        status_rows = current_status_of_key(connection, collection_id, key)
    else:
        status_rows = current_status_of_version(connection, version_id)

    if (len(status_rows) == 0 or
            status_rows[0].seg_status != segment_status_final):
        return None

    return dict(connection.fetch_all_rows(
        _retrieve_meta_query, [collection_id, status_rows[0].seg_id, ]
    ))
Example 3
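A `destroy` method that resolves status rows for the key (or for a specific `unified_id_to_delete`), sums the segment file sizes, and spawns `destroy_key` on every data writer before joining with a timeout.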
    def destroy(self, timeout=None):
        if self._pending:
            raise AlreadyInProgress()

        # TODO: find a non-blocking way to do this
        if self.unified_id_to_delete is None:
            status_rows = current_status_of_key(
                self._node_local_connection, 
                self.collection_id,
                self.key
            )
        else:
            status_rows = current_status_of_version(
                self._node_local_connection, 
                self.unified_id_to_delete
            )

        if len(status_rows) == 0:
            raise DestroyFailedError("no status rows found")

        file_size = sum(row.seg_file_size for row in status_rows)

        for i, data_writer in enumerate(self.data_writers):
            segment_num = i + 1
            self._spawn(
                data_writer.destroy_key,
                self.collection_id,
                self.key,
                self.unified_id_to_delete,
                self._unified_id,
                self.timestamp,
                segment_num,
                _local_node_name,
            )
        self._join(timeout)
        self._done = []

        return file_size
Example 4
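A generator-based `retrieve` method: after verifying that the key exists and is available, it walks the status rows, spawning `retrieve_key_start` and then `retrieve_key_next` on each connected data reader and yielding one result dict per completed sequence.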
    def retrieve(self, timeout):
        # TODO: find a non-blocking way to do this
        # TODO: don't just use the local node, it might be wrong
        if self._version_id is None:
            status_rows = current_status_of_key(
                self._node_local_connection,
                self._collection_id, 
                self._key,
            )
        else:
            status_rows = current_status_of_version(
                self._node_local_connection, self._version_id
            )

        if len(status_rows) == 0:
            raise RetrieveFailedError("key not found %s %s" % (
                self._collection_id, self._key,
            ))

        # a plain segment is available once its status is final; a conjoined
        # archive is available once its conjoined record is complete
        if status_rows[0].con_create_timestamp is None:
            is_available = status_rows[0].seg_status == segment_status_final
        else:
            is_available = status_rows[0].con_complete_timestamp is not None

        if not is_available:
            raise RetrieveFailedError("key is not available %s %s" % (
                self._collection_id, self._key,
            ))

        for status_row in status_rows:
            # spawn retrieve_key_start, then retrieve_key_next
            # until we are done
            start = True
            while True:
                self._sequence += 1
                self._log.debug("retrieve: %s %s %s" % (
                    self._sequence, 
                    status_row.seg_unified_id, 
                    status_row.seg_conjoined_part,
                ))
                # send a request to all nodes
                for i, data_reader in enumerate(self._data_readers):
                    if not data_reader.connected:
                        self._log.warn("ignoring disconnected reader %s" % (
                            str(data_reader),
                        ))
                        continue

                    segment_number = i + 1
                    if start:
                        function = data_reader.retrieve_key_start
                    else:
                        function = data_reader.retrieve_key_next
                    task = self._pending.spawn(
                        function, 
                        status_row.seg_unified_id,
                        status_row.seg_conjoined_part,
                        segment_number
                    )
                    task.link(self._done_link)
                    task.segment_number = segment_number
                    task.data_reader = data_reader
                    task.sequence = self._sequence

                # wait for, and process, replies from the nodes
                result_dict, completed = self._process_node_replies(timeout)
                self._log.debug("retrieve: completed sequence %s" % (
                    self._sequence,
                ))

                yield result_dict
                if completed:
                    break

                start = False
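
All four examples repeat the same dispatch: look up status rows by key when no version id is supplied, and by version otherwise. As a minimal sketch (not part of any of the sources above, and assuming `current_status_of_key` and `current_status_of_version` are importable from wherever each example gets them), the shared lookup could be factored into one helper:

def status_rows_for(connection, collection_id, key, version_id=None):
    """Return status rows for a key, or for one specific version of it."""
    if version_id is None:
        # no version requested: report the current status of the key
        return current_status_of_key(connection, collection_id, key)
    # a specific version was requested: look it up directly
    return current_status_of_version(connection, version_id)

Each example's `if version_id is None: ... else: ...` block would then collapse to a single `status_rows = status_rows_for(...)` call.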