Example 1
    def check_check_func(self):
        key = BitcoinVersionedPrivateKey(
            "cN5YgNRq8rbcJwngdp3fRzv833E7Z74TsF8nB6GhzRg8Gd9aGWH1")
        client = TalosVCRestClient()
        chunk = generate_random_chunk(1)
        policy = client.get_policy_with_txid(chunk.get_tag_hex())

        def get_bitcoin_address_for_pubkey_tmp(hex_pubkey):
            before = timer()
            # this extracts the binary *public* key, not a private key
            pub = extract_bin_bitcoin_pubkey(hex_pubkey)
            hash_pub = get_bin_hash160(pub)
            # version_byte=111 (0x6f) yields a Bitcoin testnet address
            addr = bin_hash160_to_address(hash_pub, version_byte=111)
            print "Time creation %s" % ((timer() - before) * 1000, )
            return addr

        def check_access_allowed_tmp(hex_pubkey, policy):
            before = timer()
            addr = get_bitcoin_address_for_pubkey_tmp(str(hex_pubkey))
            print "Bitcoin_lib %s" % ((timer() - before) * 1000, )
            if addr == policy.owner:
                return True
            if addr in policy.shares:
                return True
            return False

        self.assertTrue(
            check_access_allowed_tmp(key.public_key().to_hex(), policy))
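
For context, `bin_hash160_to_address` with `version_byte=111` (0x6f) produces a Bitcoin testnet pay-to-pubkey-hash address. A minimal sketch of that encoding, assuming the standard hash160 (RIPEMD-160 over SHA-256) and Base58Check scheme that the helper names suggest:

import hashlib

B58_ALPHABET = "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"

def base58check_encode(version_byte, payload):
    # Base58Check: version || payload || first 4 bytes of double-SHA256
    data = chr(version_byte) + payload
    checksum = hashlib.sha256(hashlib.sha256(data).digest()).digest()[:4]
    num = int((data + checksum).encode("hex"), 16)
    encoded = ""
    while num > 0:
        num, rem = divmod(num, 58)
        encoded = B58_ALPHABET[rem] + encoded
    # each leading zero byte is encoded as a literal '1'
    pad = len(data + checksum) - len((data + checksum).lstrip("\x00"))
    return "1" * pad + encoded

def hash160(data):
    # standard Bitcoin hash160: RIPEMD-160 over SHA-256
    return hashlib.new("ripemd160", hashlib.sha256(data).digest()).digest()

# addr = base58check_encode(111, hash160(bin_pubkey))  # testnet address
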
Example 2
    def test_storage1(self):
        key = BitcoinVersionedPrivateKey(
            "cN5YgNRq8rbcJwngdp3fRzv833E7Z74TsF8nB6GhzRg8Gd9aGWH1")
        talosStorage = TalosLevelDBDHTStorage("db_tmp")
        client = TalosVCRestClient()
        num_iter = 100
        for i in range(num_iter):
            chunk = generate_random_chunk(i)
            policy = client.get_policy_with_txid(chunk.get_tag_hex())
            before = timer()
            talosStorage.store_check_chunk(chunk, i, policy)
            print "Time store %s" % ((timer() - before) * 1000, )
            keeper = TimeKeeper()
            before = timer()
            talosStorage.get_check_chunk(chunk.key,
                                         key.public_key().to_hex(),
                                         policy,
                                         time_keeper=keeper)
            print "Time get %s" % ((timer() - before) * 1000, )
        count = 0
        for (db_key, value) in talosStorage.iteritemsOlderThan(100):
            count += 1
        self.assertEqual(0, count)

        count = 0
        for (db_key, value) in talosStorage.iteritems():
            count += 1
        self.assertEqual(num_iter, count)

        # after sleeping 6 seconds, every stored chunk is older than 5 seconds
        time.sleep(6)
        count = 0
        for (db_key, value) in talosStorage.iteritemsOlderThan(5):
            count += 1
        self.assertEqual(num_iter, count)
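
The assertions above only make sense if `iteritemsOlderThan(n)` yields entries stored more than `n` seconds ago, which is why nothing is older than 100 s at first and everything is older than 5 s after the 6-second sleep. A toy in-memory stand-in with those assumed semantics (not the real `TalosLevelDBDHTStorage`):

import time

class AgeIndexedStore(object):
    """Toy stand-in for the age-based iteration the test relies on
    (assumed semantics, inferred from the assertions above)."""

    def __init__(self):
        self._items = {}  # key -> (stored_at, value)

    def put(self, key, value):
        self._items[key] = (time.time(), value)

    def iteritems(self):
        for key, (_, value) in self._items.iteritems():
            yield key, value

    def iteritemsOlderThan(self, seconds):
        now = time.time()
        for key, (stored_at, value) in self._items.iteritems():
            if now - stored_at > seconds:
                yield key, value
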
Example 3
    def __init__(self, name, start_time, bc_privatekey,
                 policy_nonce, stream_id, txid, ip='127.0.0.1', port=14000):
        self.name = name
        self.start_time = start_time
        self.bc_privatekey = BitcoinVersionedPrivateKey(bc_privatekey)
        self.policy_nonce = base64.b64decode(policy_nonce)
        self.stream_id = stream_id
        self.txid = txid
        self.ip = ip
        self.port = port
        self.local_private_key = get_priv_key(self.bc_privatekey)
Example 4
    def test_chunk_java(self):
        privkey = "cQ1HBRRvJ9DaV2UZsEf5w1uLAoXjSVpLYVH5dB5hZUWk5jeJ8KCL"
        chunk = CloudChunk.decode(unhexlify(data_chunk_java))
        key = BitcoinVersionedPrivateKey(privkey)
        data_pub = chunk.encode_without_signature()
        print hexlify(data_pub)
        print data_pub_java
        print hexlify(data_pub) == data_pub_java
        print chunk.get_key_hex()
        print chunk.get_tag_hex()
        print chunk.check_signature(
            get_crypto_ecdsa_pubkey_from_bitcoin_hex(
                key.public_key().to_hex()))
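
`check_signature` receives a `cryptography`-style ECDSA public key. Verification with that library looks roughly like the sketch below; the SHA-256 hash choice is an assumption, not something the snippet confirms:

from cryptography.exceptions import InvalidSignature
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import ec

def verify_chunk_signature(pubkey, signature, signed_data):
    # returns True iff `signature` is a valid ECDSA signature over
    # `signed_data` (SHA-256 assumed here)
    try:
        pubkey.verify(signature, signed_data, ec.ECDSA(hashes.SHA256()))
        return True
    except InvalidSignature:
        return False
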
Example 5
    def test_key_switch(self):
        key = BitcoinVersionedPrivateKey(
            "cN5YgNRq8rbcJwngdp3fRzv833E7Z74TsF8nB6GhzRg8Gd9aGWH1")

        def get_priv_key2(bvpk_private_key):
            return serialization.load_pem_private_key(
                bvpk_private_key.to_pem(),
                password=None,
                backend=default_backend())

        other = get_priv_key(key)
        priv2 = get_priv_key2(key)

        def serialize_priv_key(private_key):
            numbers = private_key.private_numbers()
            return '%x' % numbers.private_value

        ser_priv1 = serialize_priv_key(other)
        ser_priv2 = serialize_priv_key(priv2)
        print "%s\n%s" % (ser_priv1, ser_priv2)

        self.assertEqual(serialize_priv_key(other), serialize_priv_key(priv2))
        self.assertEqual(other.private_numbers(), priv2.private_numbers())

        data = "Hello"
        signature1 = hash_sign_data(other, data)
        signature2 = hash_sign_data(priv2, data)

        self.assertTrue(check_signed_data(other.public_key(), signature2,
                                          data))
        self.assertTrue(check_signed_data(priv2.public_key(), signature1,
                                          data))
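
The point of the test is that `get_priv_key` and the PEM-based `get_priv_key2` yield the same underlying EC private scalar, readable via `private_numbers().private_value`. The same round-trip can be checked on a throwaway key, with no Bitcoin dependency:

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import ec

# generate a throwaway secp256k1 key and round-trip it through PEM
priv = ec.generate_private_key(ec.SECP256K1(), default_backend())
pem = priv.private_bytes(serialization.Encoding.PEM,
                         serialization.PrivateFormat.PKCS8,
                         serialization.NoEncryption())
reloaded = serialization.load_pem_private_key(pem, password=None,
                                              backend=default_backend())

# same scalar, so signatures from either object verify interchangeably
assert priv.private_numbers().private_value == \
       reloaded.private_numbers().private_value
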
Example 6
    def test_storage(self):
        key = BitcoinVersionedPrivateKey(
            "cN5YgNRq8rbcJwngdp3fRzv833E7Z74TsF8nB6GhzRg8Gd9aGWH1")
        talosStorage = LevelDBStorage("db_tmp")
        client = TalosVCRestClient()
        for i in range(100):
            chunk = generate_random_chunk(i)
            policy = client.get_policy_with_txid(chunk.get_tag_hex())
            before = timer()
            talosStorage.store_check_chunk(chunk, i, policy)
            print "Time store %s" % ((timer() - before) * 1000, )
            keeper = TimeKeeper()
            before = timer()
            talosStorage.get_check_chunk(chunk.key,
                                         key.public_key().to_hex(),
                                         policy,
                                         time_keeper=keeper)
            print "Time get %s" % ((timer() - before) * 1000, )

        # iterate the raw LevelDB entries; renamed to avoid shadowing `key` above
        for (db_key, db_value) in talosStorage.db.RangeIter():
            print base64.b64encode(db_key)
            print base64.b64encode(db_value)
Example 7
    def test_token_check(self):
        privkey = "cPuiZfHTkWAPhPvMSPetvP1jRarkQ8BRtPrEVuP5PhDsTGrrcm2f"
        dataSign = "64666173646661736661736466647366320000000000000000000000000000000000000000000000000000000000000000"
        tokenS = """{"owner":"dfasdfasfasdfdsf","chunk_key":"AAAAAAAAAAAAAAAAAAAAAA==","stream_id":2,"signature":"MEQCIBtOgOqsBR5K0RQs7MP4ef2oL+ycM9sMklf1OZIdHTH4AiAs+zD8iU5iFQML1OXF9ORFiNwyacF16jMUSTsNoJYXGQ==","nonce":"AAAAAAAAAAAAAAAAAAAAAA==","pubkey":"0222d41a2f7e3fb398cfe320bfcd25712f675c5d916664e3f5132feaecc8a4603f"}"""
        key = BitcoinVersionedPrivateKey(privkey)

        token = QueryToken.from_json(json.loads(tokenS))
        dataSignHere = hexlify(token.get_signature_data())
        print key.to_hex()
        print key.public_key().to_hex()
        print token.pubkey
        print key.public_key().to_hex() == token.pubkey

        print dataSignHere
        print dataSign == dataSignHere

        print "ok?"
        print check_valid(token)
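
The expected `dataSign` value can be read off the token itself: it starts with `hexlify("dfasdfasfasdfdsf")`, followed by `32` (ASCII "2", the stream id), followed by zero bytes matching the token's all-zero nonce and chunk key. A speculative reconstruction (the relative order of the two zeroed 16-byte fields cannot be determined from an all-zero payload):

from binascii import hexlify

owner = "dfasdfasfasdfdsf"       # -> 64666173...647366
stream_id = 2                     # str(2) -> "2" -> 0x32
nonce = chunk_key = "\x00" * 16   # both base64 "AAAAAAAAAAAAAAAAAAAAAA=="

# assumed field order; nonce/chunk_key order is a guess here
payload = owner + str(stream_id) + nonce + chunk_key
print hexlify(payload)  # should match dataSign from the test above
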
Example 8
def run_benchmark_s3_plain_latency(
        num_rounds,
        out_logger,
        bucket_name,
        private_key=BitcoinVersionedPrivateKey(PRIVATE_KEY),
        policy_nonce=base64.b64decode(NONCE),
        stream_id=STREAMID,
        txid=TXID,
        chunk_size=100000,
        do_delete=False,
        do_comp_data=True):
    key = os.urandom(32)
    identifier = DataStreamIdentifier(private_key.public_key().address(),
                                      stream_id, policy_nonce, txid)
    storage = PlainS3Storage(bucket_name)
    for round_bench in range(num_rounds):
        try:
            time_keeper = TimeKeeper()
            #chunk = generate_data(size=chunk_size, time_keeper=time_keeper)
            chunk = DummyData(8500)
            key_for_chunk = identifier.get_key_for_blockid(round_bench)
            if do_comp_data:
                chunk = generate_random_chunk_from_data(
                    chunk,
                    private_key,
                    round_bench,
                    identifier,
                    time_keeper=time_keeper)
            storage.store_chunk(key_for_chunk, chunk, time_keeper=time_keeper)
            chunk = storage.get_chunk(key_for_chunk,
                                      time_keeper=time_keeper,
                                      do_plain=(not do_comp_data))
            if chunk is None:
                print "Round %d error" % round_bench
            else:
                print "Round %d ok Chunk size: %d" % (round_bench,
                                                      len(chunk.encode()))

            out_logger.log_times_keeper(time_keeper)
        except Exception as e:
            print "Round %d error: %s" % (round_bench, e)
    print "DONE"
    if do_delete:
        clean_bucket(storage.s3, bucket_name)
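
Note that the defaults `private_key=BitcoinVersionedPrivateKey(PRIVATE_KEY)` and `policy_nonce=base64.b64decode(NONCE)` are evaluated once, when the `def` statement runs, so a malformed constant fails at import time and every call shares the same objects. A tiny demonstration of this Python behaviour:

import os

def demo(key=os.urandom(4)):
    # the default is computed once, when `def` executes
    return key.encode("hex")

print demo() == demo()                             # True: same default bytes
print demo(os.urandom(4)) == demo(os.urandom(4))   # almost surely False
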
Example 9
def run_benchmark_s3_talos_fetch(
        num_rounds,
        num_gets,
        out_logger,
        bucket_name,
        private_key=BitcoinVersionedPrivateKey(PRIVATE_KEY),
        policy_nonce=base64.b64decode(NONCE),
        stream_id=STREAMID,
        txid=TXID,
        chunk_size=100000,
        num_threads=None,
        do_store=True,
        do_delete=True,
        avoid_token_create=True):
    key = os.urandom(32)
    owner = private_key.public_key().address()
    identifier = DataStreamIdentifier(owner, stream_id, policy_nonce, txid)
    vc_client = TalosVCRestClient()
    storage = TalosS3Storage(bucket_name)

    num_threads = num_threads or num_gets
    if do_store:
        print "Store in S3"
        for block_id in range(num_gets):
            chunk = generate_random_chunk(private_key,
                                          block_id,
                                          identifier,
                                          key=key,
                                          size=chunk_size)
            store_chunk(storage, vc_client, chunk)

    if avoid_token_create:
        token_storage = []
        for block_id in range(num_gets):
            token = generate_query_token(
                identifier.owner, identifier.streamid, str(bytearray(16)),
                identifier.get_key_for_blockid(block_id), private_key)
            token_storage.append(token)
    else:
        token_storage = None

    for round_bench in range(num_rounds):
        try:
            time_keeper = TimeKeeper()
            # one independent list per thread; [[]] * n would alias a single list
            results = [[] for _ in range(num_threads)]
            threads = [
                FetchTalosThread(idx,
                                 results,
                                 TalosS3Storage(bucket_name),
                                 block_id,
                                 private_key,
                                 identifier,
                                 vc_client,
                                 token_store=token_storage)
                for idx, block_id in enumerate(
                    splitting(range(num_gets), num_threads))
            ]
            time_keeper.start_clock()
            map(lambda x: x.start(), threads)
            map(lambda x: x.join(), threads)
            time_keeper.stop_clock("time_fetch_all")
            chunks = [item for sublist in results for item in sublist]
            if len(chunks) == num_gets:
                print "Round %d ok Num results: %d" % (round_bench, num_gets)
            else:
                print "Round %d error Num results: %d" % (round_bench,
                                                          len(chunks))
            out_logger.log_times_keeper(time_keeper)
        except Exception as e:
            print "Round %d error: %s" % (round_bench, e)
    print "DONE"
    if do_delete:
        clean_bucket(storage.s3, bucket_name)
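
The per-thread result lists above matter because `[[]] * n` creates `n` references to one shared list; per-index assignment would hide the aliasing, but any `append` from a thread would corrupt the flattened result count. A two-line demonstration:

shared = [[]] * 3                      # three references to ONE list
shared[0].append("x")
print shared                           # [['x'], ['x'], ['x']]

independent = [[] for _ in range(3)]   # three distinct lists
independent[0].append("x")
print independent                      # [['x'], [], []]
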
Example 10
def run_benchmark_s3_talos(num_rounds,
                           out_logger,
                           bucket_name,
                           private_key=BitcoinVersionedPrivateKey(PRIVATE_KEY),
                           policy_nonce=base64.b64decode(NONCE),
                           stream_id=STREAMID,
                           txid=TXID,
                           chunk_size=100000,
                           do_delete=True,
                           do_sig=False):
    key = os.urandom(32)
    owner = private_key.public_key().address()
    identifier = DataStreamIdentifier(owner, stream_id, policy_nonce, txid)
    vc_client = TalosVCRestClient()
    storage = TalosS3Storage(bucket_name)

    for round_bench in range(num_rounds):
        try:
            time_keeper = TimeKeeper()
            #chunk_data = generate_data(size=chunk_size)
            chunk_data = DummyData(8500)

            global_id = time_keeper.start_clock_unique()
            chunk = generate_random_chunk_from_data(chunk_data,
                                                    private_key,
                                                    round_bench,
                                                    identifier,
                                                    time_keeper=time_keeper)
            store_chunk(storage,
                        vc_client,
                        chunk,
                        time_keeper=time_keeper,
                        do_sig=do_sig)
            time_keeper.stop_clock_unique("time_s3_store_chunk", global_id)

            token_time_id = time_keeper.start_clock_unique()
            token = generate_query_token(owner, stream_id, str(bytearray(16)),
                                         chunk.key, private_key)
            time_keeper.stop_clock_unique("time_token_create", token_time_id)

            global_id = time_keeper.start_clock_unique()
            chunk = fetch_chunk(storage,
                                vc_client,
                                token,
                                global_id=global_id,
                                time_keeper=time_keeper)
            # `aes_key` was undefined here; `key` is the symmetric key
            # generated at the top of this function
            data = chunk.get_and_check_chunk_data(key,
                                                  compression_used=False,
                                                  time_keeper=time_keeper,
                                                  do_decode=False)
            time_keeper.stop_clock_unique("time_s3_get_chunk", global_id)

            if chunk is None:
                print "Round %d error" % round_bench
            else:
                print "Round %d ok Chunk size: %d" % (round_bench,
                                                      len(chunk.encode()))

            out_logger.log_times_keeper(time_keeper)
        except Exception as e:
            print "Round %d error: %s" % (round_bench, e)
    print "DONE"
    if do_delete:
        clean_bucket(storage.s3, bucket_name)
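
The `TimeKeeper` calls follow a consistent pattern: `start_clock`/`stop_clock(name)` for simple spans and `start_clock_unique()`/`stop_clock_unique(name, token)` for overlapping ones, with results collected in `logged_times` (in milliseconds, judging by the `/ 1000` conversions elsewhere). A toy stand-in with that assumed behaviour, not the project's real class:

from timeit import default_timer as timer
import uuid

class MiniTimeKeeper(object):
    """Toy stand-in for the TimeKeeper bookkeeping used above
    (behaviour inferred from the call sites)."""

    def __init__(self):
        self.logged_times = {}
        self._started = None
        self._open = {}

    def start_clock(self):
        self._started = timer()

    def stop_clock(self, name):
        self.logged_times[name] = (timer() - self._started) * 1000  # ms

    def start_clock_unique(self):
        # a token lets overlapping measurements coexist without clobbering
        token = uuid.uuid4().hex
        self._open[token] = timer()
        return token

    def stop_clock_unique(self, name, token):
        self.logged_times[name] = (timer() - self._open.pop(token)) * 1000  # ms
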
Example 11
                        required=False)
    args = parser.parse_args()

    FIELDS_TALOS = [
        "time_s3_store_chunk", "time_s3_get_chunk", "time_create_chunk"
    ]
    FIELDS_TALOS_EXT = [
        "time_s3_store_chunk", "time_s3_get_chunk", "time_create_chunk",
        "time_token_create", "aes_gcm_decrypt", ENTRY_FETCH_POLICY,
        ENTRY_GET_AND_CHECK
    ]
    FIELDS_TALOS_FETCH = ["time_fetch_all"]

    do_comp_plain = True

    private_key = BitcoinVersionedPrivateKey(args.private_key)
    policy_nonce = base64.b64decode(args.nonce)

    if args.log_db is None:
        logger = FileBenchmarkLogger(
            "%s_%d_SYNC_PLAIN_S3.log" % (args.name, args.num_rounds),
            FIELDS_TALOS)
    else:
        logger = SQLLiteBenchmarkLogger(args.log_db, FIELDS_TALOS,
                                        "%s_SYNC_PLAIN" % (args.name, ))

    run_benchmark_s3_plain_latency(args.num_rounds,
                                   logger,
                                   args.bucket_name,
                                   private_key=private_key,
                                   policy_nonce=policy_nonce,
Example 12
#© 2017-2020, ETH Zurich, D-INFK, [email protected]

import base64
import logging
import os
import random
import time

import requests

from timeit import default_timer as timer

from talosstorage.checks import BitcoinVersionedPrivateKey, generate_query_token, get_priv_key
from talosstorage.chunkdata import ChunkData, DoubleEntry, DataStreamIdentifier, create_cloud_chunk
# TimeKeeper, PictureEntry and compress_data are also used below; they ship
# with the Talos code base (their exact module paths are not shown in this snippet)

PRIVATE_KEY = BitcoinVersionedPrivateKey("cN5YgNRq8rbcJwngdp3fRzv833E7Z74TsF8nB6GhzRg8Gd9aGWH1")
NONCE = base64.b64decode("OU2HliHRUUZJokNvn84a+A==")
STREAMID = 1
TXID = "8cf71b7ed09acf896b40fc087e56d3d4dbd8cc346a869bb8a81624153c0c2b8c"


def generate_random_chunk(block_id, key=os.urandom(32), size=1000, min_val=0, max_val=30):
    # note: the default `key` is drawn once at import time, so every call that
    # omits `key` shares the same 32 random bytes
    chunk = ChunkData()
    for i in range(size):
        entry = DoubleEntry(int(time.time()), "test", float(random.uniform(min_val, max_val)))
        chunk.add_entry(entry)

    stream_ident = DataStreamIdentifier(PRIVATE_KEY.public_key().address(), STREAMID, NONCE,
                                        TXID)

    return create_cloud_chunk(stream_ident, block_id, get_priv_key(PRIVATE_KEY), 10, key, chunk)


def generate_token(block_id, nonce):
    owner = PRIVATE_KEY.public_key().address()


class ImageProducer(object):
    def __init__(self,
                 name,
                 start_time,
                 bc_privatekey,
                 policy_nonce,
                 stream_id,
                 txid,
                 ip='127.0.0.1',
                 port=14000):
        self.name = name
        self.start_time = start_time
        self.bc_privatekey = BitcoinVersionedPrivateKey(bc_privatekey)
        self.policy_nonce = base64.b64decode(policy_nonce)
        self.stream_id = stream_id
        self.txid = txid
        self.ip = ip
        self.port = port
        self.local_private_key = get_priv_key(self.bc_privatekey)

    def _generate_cloud_chunk(self, block_id, sym_key, chunk, timer_chunk):
        stream_ident = DataStreamIdentifier(
            self.bc_privatekey.public_key().address(), self.stream_id,
            self.policy_nonce, self.txid)

        return create_cloud_chunk(stream_ident,
                                  block_id,
                                  self.local_private_key,
                                  0,
                                  sym_key,
                                  chunk,
                                  time_keeper=timer_chunk)

    def _store_to_cloud(self, chunk_encoded):
        req = requests.post("http://%s:%d/store_chunk" % (self.ip, self.port),
                            data=chunk_encoded)
        return req.reason, req.status_code

    def run_loop(self,
                 image_capture,
                 time_file,
                 sym_key="a" * 16,
                 interval=3600):
        while True:
            try:
                timer_chunk = TimeKeeper()
                total_id = timer_chunk.start_clock_unique()
                timestamp_data = int(time.time())
                # integer division: one block id per `interval` seconds
                block_id = (timestamp_data - self.start_time) / interval
                # Take a picture
                picture_name = "%s%d.jpg" % (self.name, block_id)
                image_capture(picture_name)
                print picture_name

                # load the image as raw bytes
                with open(picture_name, 'rb') as f:
                    picture = f.read()

                chunk_tmp = ChunkData()

                chunk_tmp.add_entry(
                    PictureEntry(timestamp_data,
                                 picture_name,
                                 picture,
                                 time_keeper=timer_chunk))

                cur_time = timer()
                cloud_chunk = self._generate_cloud_chunk(
                    block_id, sym_key, chunk_tmp, timer_chunk)
                chunk_creation = timer() - cur_time

                len_normal = len(chunk_tmp.encode(use_compression=False))
                len_compressed = len(
                    compress_data(chunk_tmp.encode(use_compression=True)))

                cloud_chunk_encoded = cloud_chunk.encode()
                length_final = len(cloud_chunk_encoded)
                cur_time = timer()
                self._store_to_cloud(cloud_chunk_encoded)
                chunk_store = timer() - cur_time

                times = timer_chunk.logged_times

                time_file.write(
                    "%s, %s, %s, %s, %s, %s, %d, %d, %d,\n" %
                    (times['chunk_compression'], times['gcm_encryption'],
                     times['ecdsa_signature'],
                     times['time_lepton_compression'], chunk_creation * 1000,
                     chunk_store * 1000, len_normal, len_compressed,
                     length_final))
                time_file.flush()
                timer_chunk.stop_clock_unique('time_total', total_id)
                # guard against a negative sleep if the round overran the interval
                time.sleep(max(0, interval -
                               int(timer_chunk.logged_times['time_total'] / 1000)))
            except RuntimeError as e:
                print e.message
                logging.error("Exception occurred %s" % e.message)
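
A hypothetical wiring of `ImageProducer`, reusing the key, nonce, stream id and txid constants defined at the top of this file; `capture_image` is a stand-in for a real camera call, and `run_loop` runs until interrupted:

def capture_image(path):
    # stand-in for a real camera: writes minimal JPEG start/end markers
    with open(path, 'wb') as f:
        f.write('\xff\xd8\xff\xd9')

producer = ImageProducer("cam1",
                         int(time.time()),
                         "cN5YgNRq8rbcJwngdp3fRzv833E7Z74TsF8nB6GhzRg8Gd9aGWH1",
                         "OU2HliHRUUZJokNvn84a+A==",
                         STREAMID,
                         TXID)
with open("capture_times.csv", "a") as time_file:
    producer.run_loop(capture_image, time_file, interval=10)
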