    def run_loop(self,
                 image_capture,
                 time_file,
                 sym_key="a" * 16,
                 interval=3600):
        while True:
            try:
                timer_chunk = TimeKeeper()
                total_id = timer_chunk.start_clock_unique()
                timestamp_data = int(time.time())
                # Integer division keeps block_id a whole block index
                block_id = (timestamp_data - self.start_time) // interval
                # Take a picture
                picture_name = "%s%d.jpg" % (self.name, block_id)
                image_capture(picture_name)
                print(picture_name)

                # Load the captured image as raw bytes
                with open(picture_name, 'rb') as f:
                    picture = f.read()

                # Pack the picture into a chunk entry for encryption/upload
                chunk_tmp = ChunkData()

                chunk_tmp.add_entry(
                    PictureEntry(timestamp_data,
                                 picture_name,
                                 picture,
                                 time_keeper=timer_chunk))

                # Encrypt and encode the chunk, timing the creation step
                cur_time = timer()
                cloud_chunk = self._generate_cloud_chunk(
                    block_id, sym_key, chunk_tmp, timer_chunk)
                chunk_creation = timer() - cur_time

                # Compare encoded sizes with and without compression
                len_normal = len(chunk_tmp.encode(use_compression=False))
                len_compressed = len(
                    compress_data(chunk_tmp.encode(use_compression=True)))

                cloud_chunk_encoded = cloud_chunk.encode()
                length_final = len(cloud_chunk_encoded)
                # Upload the encoded chunk and time the store step
                cur_time = timer()
                self._store_to_cloud(cloud_chunk_encoded)
                chunk_store = timer() - cur_time

                times = timer_chunk.logged_times

                # CSV row: compression, GCM encryption, ECDSA signature,
                # lepton compression, creation ms, store ms,
                # plain/compressed/final sizes
                time_file.write(
                    "%s, %s, %s, %s, %s, %s, %d, %d, %d,\n" %
                    (times['chunk_compression'], times['gcm_encryption'],
                     times['ecdsa_signature'],
                     times['time_lepton_compression'], chunk_creation * 1000,
                     chunk_store * 1000, len_normal, len_compressed,
                     length_final))
                time_file.flush()
                timer_chunk.stop_clock_unique('time_total', total_id)
                # Sleep out the rest of the interval; clamp at zero so a slow
                # round cannot pass a negative value to time.sleep
                time.sleep(max(0, interval -
                               int(timer_chunk.logged_times['time_total'] / 1000)))
            except RuntimeError as e:
                print(e)
                logging.error("Exception occurred: %s" % e)
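The loop's instrumentation relies on TimeKeeper's paired start_clock_unique/stop_clock_unique calls, with elapsed values logged in milliseconds (hence the / 1000 before sleeping). Below is a minimal stand-in sketch of that pattern using only the standard library; MiniTimeKeeper is illustrative and not the project's actual class:

from timeit import default_timer as timer
import uuid

class MiniTimeKeeper(object):
    # Illustrative stand-in for the start/stop-by-unique-id timing pattern
    def __init__(self):
        self.logged_times = {}
        self._running = {}

    def start_clock_unique(self):
        clock_id = uuid.uuid4().hex
        self._running[clock_id] = timer()
        return clock_id

    def stop_clock_unique(self, name, clock_id):
        # Log elapsed time in milliseconds, as the loop above expects
        self.logged_times[name] = (timer() - self._running.pop(clock_id)) * 1000

tk = MiniTimeKeeper()
total_id = tk.start_clock_unique()
sum(range(1000000))  # stand-in workload
tk.stop_clock_unique('time_total', total_id)
print(tk.logged_times['time_total'])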
Example #2
def run_benchmark_s3_talos(num_rounds,
                           out_logger,
                           bucket_name,
                           private_key=BitcoinVersionedPrivateKey(PRIVATE_KEY),
                           policy_nonce=base64.b64decode(NONCE),
                           stream_id=STREAMID,
                           txid=TXID,
                           chunk_size=100000,
                           do_delete=True,
                           do_sig=False):
    # Fresh symmetric key for encrypting chunk payloads
    aes_key = os.urandom(32)
    owner = private_key.public_key().address()
    identifier = DataStreamIdentifier(owner, stream_id, policy_nonce, txid)
    vc_client = TalosVCRestClient()
    storage = TalosS3Storage(bucket_name)

    for round_bench in range(num_rounds):
        try:
            time_keeper = TimeKeeper()
            # chunk_data = generate_data(size=chunk_size)
            chunk_data = DummyData(8500)  # fixed-size dummy payload; chunk_size unused

            global_id = time_keeper.start_clock_unique()
            chunk = generate_random_chunk_from_data(chunk_data,
                                                    private_key,
                                                    round_bench,
                                                    identifier,
                                                    time_keeper=time_keeper)
            store_chunk(storage,
                        vc_client,
                        chunk,
                        time_keeper=time_keeper,
                        do_sig=do_sig)
            time_keeper.stop_clock_unique("time_s3_store_chunk", global_id)

            token_time_id = time_keeper.start_clock_unique()
            # Nonce placeholder: 16 zero bytes
            token = generate_query_token(owner, stream_id, bytes(16),
                                         chunk.key, private_key)
            time_keeper.stop_clock_unique("time_token_create", token_time_id)

            global_id = time_keeper.start_clock_unique()
            chunk = fetch_chunk(storage,
                                vc_client,
                                token,
                                global_id=global_id,
                                time_keeper=time_keeper)
            if chunk is None:
                print("Round %d error" % round_bench)
            else:
                # Decrypt and verify the fetched chunk before stopping the clock
                data = chunk.get_and_check_chunk_data(aes_key,
                                                      compression_used=False,
                                                      time_keeper=time_keeper,
                                                      do_decode=False)
                time_keeper.stop_clock_unique("time_s3_get_chunk", global_id)
                print("Round %d ok Chunk size: %d" % (round_bench,
                                                      len(chunk.encode())))

            out_logger.log_times_keeper(time_keeper)
        except Exception as e:
            print("Round %d error: %s" % (round_bench, e))
    print("DONE")
    if do_delete:
        clean_bucket(storage.s3, bucket_name)
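A hypothetical driver for the benchmark, assuming the module-level defaults (PRIVATE_KEY, NONCE, STREAMID, TXID) are configured; PrintLogger is a minimal stand-in for out_logger (the benchmark only calls log_times_keeper), and the bucket name is a placeholder:

class PrintLogger(object):
    # Stand-in logger: only the method the benchmark actually calls
    def log_times_keeper(self, time_keeper):
        print(time_keeper.logged_times)

run_benchmark_s3_talos(num_rounds=10,
                       out_logger=PrintLogger(),
                       bucket_name="my-talos-bench-bucket",  # placeholder
                       do_delete=True)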