def start_retrieve_slice_tests(state):
    """
    start a deferred retrieve request for each slice
    """
    global _pending_retrieve_slice_test_count

    for key in state["slice-data"].keys():
        log.msg("retrieving slice '%s' (%s, %s)" % (
                key, 
                state["slice-data"][key]["offset"],
                state["slice-data"][key]["size"], ), 
                logLevel=logging.DEBUG)

        consumer = BufferedConsumer()

        path = compute_retrieve_path(key)
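        # request only this slice's byte range; expect 206 PARTIAL_CONTENT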
        range_header_tuple = \
            compute_range_header_tuple(state["slice-data"][key]["offset"],
                                       state["slice-data"][key]["size"])
        headers = dict([range_header_tuple])
        expected_status = frozenset([httplib.PARTIAL_CONTENT, ])

        deferred = start_collection_request(state["identity"],
                                            "GET", 
                                            state["collection-name"],
                                            path,
                                            response_consumer=consumer,
                                            additional_headers=headers,
                                            valid_http_status=expected_status)
        deferred.addCallback(_retrieve_slice_data, state, key, consumer)
        deferred.addErrback(_retrieve_slice_error, state, key)

        _pending_retrieve_slice_test_count += 1


def start_single_part_archives(state):
    """
    start a group of deferred single part archive requests
    """
    global _pending_archive_count

    log.msg("starting user_name = %s collection = %s" % (
            state["identity"].user_name, 
            state["collection-name"], ), 
            logLevel=logging.DEBUG)

    # start an archive for each key
    for i in range(state["args"].number_of_single_part_keys):
        prefix = random.choice(state["prefixes"])
        key = "".join([prefix, state["separator"], 
                      "single_part_key_%05d" % (i+1, )])
        log.msg("starting archive for %r" % (key, ), logLevel=logging.DEBUG)

        consumer = BufferedConsumer()

        path = compute_archive_path(key)

        length = random.randint(state["args"].min_single_part_file_size, 
                                state["args"].max_single_part_file_size)
        producer = PassThruProducer(key, length)

        state["key-data"][key] = {"length"              : length,
                                  "md5"                 : md5(),
                                  "version-identifier"  : None}

        deferred = start_collection_request(state["identity"],
                                            "POST", 
                                            state["collection-name"],
                                            path, 
                                            response_consumer=consumer, 
                                            body_producer=producer)
        deferred.addCallback(_archive_result, state, key, consumer)
        deferred.addErrback(_archive_error, state, key)

        _pending_archive_count += 1

        # _feed_producer loops on callLater until the archive is complete
        feed_delay = random.uniform(state["args"].min_feed_delay, 
                                    state["args"].max_feed_delay)

        reactor.callLater(feed_delay, _feed_producer, key, producer, state)


def start_head_tests(state):
    """
    start a deferred request for HEAD on every key
    """
    global _pending_head_test_count

    for key in state["key-data"].keys():
        log.msg("starting HEAD for %r" % (key, ), logLevel=logging.DEBUG)

        path = compute_head_path(key)

        deferred = start_collection_request(state["identity"],
                                            "HEAD", 
                                            state["collection-name"],
                                            path)
        deferred.addCallback(_head_result, state, key)
        deferred.addErrback(_head_error, state, key)

        _pending_head_test_count += 1


def start_conjoined_archives(state):
    """
    start a group of deferred conjoined archive requests
    """
    global _pending_start_conjoined_count

    log.msg("start conjoined user_name = %s collection = %s" % (
            state["identity"].user_name, 
            state["collection-name"], ), 
            logLevel=logging.DEBUG)

    # start all the conjoined archives
    for i in range(state["args"].number_of_conjoined_keys):
        prefix = random.choice(state["prefixes"])
        key = "".join([prefix, state["separator"], 
                       "conjoined_key_%05d" % (i+1, )])
        log.msg("starting conjoined archive for %r" % (key, ), 
                logLevel=logging.DEBUG)

        consumer = BufferedConsumer()

        path = compute_start_conjoined_path(key)

        length = random.randint(state["args"].min_conjoined_file_size, 
                                state["args"].max_conjoined_file_size)

        state["key-data"][key] = {"length"              : length,
                                  "md5"                 : md5(),
                                  "version-identifier"  : None}

        state["conjoined-data"][key] = {"conjoined-identifier": None}

        deferred = start_collection_request(state["identity"],
                                            "POST", 
                                            state["collection-name"],
                                            path, 
                                            response_consumer=consumer)
        deferred.addCallback(_start_conjoined_result, state, key, consumer)
        deferred.addErrback(_start_conjoined_error, state, key)

        _pending_start_conjoined_count += 1


def start_retrieve_stream_tests(state):
    """
    start a deferred retrieve request for each key
    """
    global _pending_retrieve_stream_test_count

    for key in state["key-data"].keys():
        log.msg("retrieving key '%s'" % (key, ), logLevel=logging.DEBUG)

        consumer = TestStreamConsumer()

        path = compute_retrieve_path(key)
        deferred = start_collection_request(state["identity"],
                                            "GET", 
                                            state["collection-name"],
                                            path,
                                            response_consumer=consumer)
        deferred.addCallback(_retrieve_data, state, key, consumer)
        deferred.addErrback(_retrieve_error, state, key)

        _pending_retrieve_stream_test_count += 1


def _finish_conjoined(state):
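    """
    start a deferred request to finish each conjoined archive
    """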
    global _pending_finish_conjoined_count

    # finish all the conjoined archives
    for key, entry in state["conjoined-data"].items():
        log.msg("finishing conjoined archive for %r %s" % 
                (key, entry["conjoined-identifier"], ), 
                logLevel=logging.DEBUG)

        consumer = BufferedConsumer()

        path = compute_finish_conjoined_path(key, 
                                             entry["conjoined-identifier"])

        deferred = start_collection_request(state["identity"],
                                            "POST", 
                                            state["collection-name"],
                                            path, 
                                            response_consumer=consumer)
        deferred.addCallback(_finish_conjoined_result, state, key, consumer)
        deferred.addErrback(_finish_conjoined_error, state, key)

        _pending_finish_conjoined_count += 1


def start_list_versions_tests(state):
    """
    start a deferred request for list_versions, with and without key prefixes
    """
    global _pending_list_versions_test_count
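    # an empty prefix requests the listing without any key prefix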
    prefixes = ["", ]
    prefixes.extend(state["prefixes"])

    for prefix in prefixes:
        log.msg("listing versions for prefix '%s'" % (prefix, ), 
                logLevel=logging.DEBUG)

        consumer = BufferedConsumer()

        path = compute_list_versions_path(prefix=prefix)
        deferred = start_collection_request(state["identity"],
                                            "GET", 
                                            state["collection-name"],
                                            path,
                                            response_consumer=consumer)
        deferred.addCallback(_list_versions_result, state, prefix, consumer)
        deferred.addErrback(_list_versions_error, state, prefix)

        _pending_list_versions_test_count += 1


def _archive_conjoined(state):
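    """
    start a group of deferred archive requests, one for each part
    of each conjoined key
    """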
    global _pending_archive_count

    for key, entry in state["conjoined-data"].items():

        conjoined_identifier = entry["conjoined-identifier"]

        length = random.randint(state["args"].min_conjoined_file_size, 
                                state["args"].max_conjoined_file_size)
        _key_content[key] = \
            "".join([random.choice(printable) for _ in range(length)])

        state["key-data"][key] = {"length"            : length,
                                  "md5"               : md5(_key_content[key]),
                                  "version-identifier": None}

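        # step through the content in slices of at most max_conjoined_part_size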
        slice_start = 0
        slice_end = slice_start + state["args"].max_conjoined_part_size

        conjoined_part = 0
        while slice_start < length:
            conjoined_part += 1

            producer_name = "%s_%03d" % (key, conjoined_part)

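            # the final part may be shorter than max_conjoined_part_size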
            if slice_end <= length:
                producer_length = state["args"].max_conjoined_part_size
            else:
                producer_length = length - slice_start 

            consumer = BufferedConsumer()

            producer = PassThruProducer(producer_name, producer_length)

            path = \
                compute_archive_path(key, 
                                     conjoined_identifier=conjoined_identifier,
                                     conjoined_part=conjoined_part)

            deferred = start_collection_request(state["identity"],
                                                "POST", 
                                                state["collection-name"],
                                                path, 
                                                response_consumer=consumer, 
                                                body_producer=producer)
            deferred.addCallback(_archive_result, 
                                 state, 
                                 key, 
                                 conjoined_part, 
                                 consumer)
            deferred.addErrback(_archive_error, state, key, conjoined_part)

            _pending_archive_count += 1

            # _feed_producer loops on callLater until the archive is complete
            feed_delay = random.uniform(state["args"].min_feed_delay, 
                                        state["args"].max_feed_delay)

            reactor.callLater(feed_delay, 
                              _feed_producer, 
                              key, 
                              conjoined_part, 
                              producer, 
                              slice_start,
                              slice_end,
                              state)

            slice_start = slice_end
            slice_end = slice_start + state["args"].max_conjoined_part_size