def start_single_part_archives(state):
    """
    start a group of deferred archive requests
    """
    global _pending_archive_count
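    # module-level count of outstanding archive requests; the result
    # and error callbacks are expected to decrement it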

    log.msg("starting user_name = %s collection = %s" % (
            state["identity"].user_name, 
            state["collection-name"], ), 
            logLevel=logging.DEBUG)

    # start an archive request for each single-part key
    for i in range(state["args"].number_of_single_part_keys):
        prefix = random.choice(state["prefixes"])
        key = "".join([prefix, state["separator"], 
                      "single_part_key_%05d" % (i+1, )])
        log.msg("starting archive for %r" % (key, ), logLevel=logging.DEBUG)

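        # the consumer buffers the response body for _archive_result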
        consumer = BufferedConsumer()

        path = compute_archive_path(key)

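        # the producer promises 'length' bytes; _feed_producer delivers
        # them to it incrementally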
        length = random.randint(state["args"].min_single_part_file_size, 
                                state["args"].max_single_part_file_size)
        producer = PassThruProducer(key, length)

        state["key-data"][key] = {"length"              : length,
                                  "md5"                 : md5(),
                                  "version-identifier"  : None}

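        # POST the archive request; the deferred fires with the result
        # once the full request/response cycle completes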
        deferred = start_collection_request(state["identity"],
                                            "POST", 
                                            state["collection-name"],
                                            path, 
                                            response_consumer=consumer, 
                                            body_producer=producer)
        deferred.addCallback(_archive_result, state, key, consumer)
        deferred.addErrback(_archive_error, state, key)

        _pending_archive_count += 1

        # schedule the first feed; _feed_producer loops with callLater
        # until the key's content has been completely fed
        feed_delay = random.uniform(state["args"].min_feed_delay, 
                                    state["args"].max_feed_delay)

        reactor.callLater(feed_delay, _feed_producer, key, producer, state)

def _archive_conjoined(state):
    """
    start a group of deferred archive requests for conjoined
    (multi-part) keys
    """
    global _pending_archive_count

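    # each entry is assumed to hold the identifier returned when its
    # conjoined archive was started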
    for key, entry in state["conjoined-data"].items():

        conjoined_identifier = entry["conjoined-identifier"]

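        # generate this key's full content up front; the parts below
        # are fed as slices of it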
        length = random.randint(state["args"].min_conjoined_file_size, 
                                state["args"].max_conjoined_file_size)
        _key_content[key] = \
            "".join([random.choice(printable) for _ in range(length)])

        state["key-data"][key] = {"length"            : length,
                                  "md5"               : md5(_key_content[key]),
                                  "version-identifier": None}

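        # walk the content in windows of max_conjoined_part_size;
        # each window becomes one conjoined part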
        slice_start = 0
        slice_end = slice_start + state["args"].max_conjoined_part_size

        conjoined_part = 0
        while slice_start < length:
            conjoined_part += 1

            producer_name = "%s_%03d" % (key, conjoined_part)

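            # the final part may be shorter than the maximum part size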
            if slice_end <= length:
                producer_length = state["args"].max_conjoined_part_size
            else:
                producer_length = length - slice_start 

            consumer = BufferedConsumer()

            producer = PassThruProducer(producer_name, producer_length)

            path = \
                compute_archive_path(key,
                                     conjoined_identifier=conjoined_identifier,
                                     conjoined_part=conjoined_part)

            deferred = start_collection_request(state["identity"],
                                                "POST", 
                                                state["collection-name"],
                                                path, 
                                                response_consumer=consumer, 
                                                body_producer=producer)
            deferred.addCallback(_archive_result, 
                                 state, 
                                 key, 
                                 conjoined_part, 
                                 consumer)
            deferred.addErrback(_archive_error, state, key, conjoined_part)

            _pending_archive_count += 1

            # schedule the first feed; _feed_producer loops with callLater
            # until this part's slice has been completely fed
            feed_delay = random.uniform(state["args"].min_feed_delay, 
                                        state["args"].max_feed_delay)

            reactor.callLater(feed_delay, 
                              _feed_producer, 
                              key, 
                              conjoined_part, 
                              producer, 
                              slice_start,
                              slice_end,
                              state)

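            # advance the window to the next part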
            slice_start = slice_end
            slice_end = slice_start + state["args"].max_conjoined_part_size