def test_tmp(self):
    for i in range(100):
        chunk_tmp = ChunkData(entry_type=TYPE_PICTURE_ENTRY)
        chunk_tmp.add_entry(
            PictureEntry(int(time.time()), "bla%d" % i,
                         "sdfasdfasdf%d" % i))
        print(chunk_tmp.entries[0].metadata)
        print(hash(chunk_tmp.entries[0].picture_data))
Example #2
def generate_data(tag="test",
                  size=1000,
                  max_float=1000,
                  time_keeper=TimeKeeper()):  # caution: this default is created once, at definition time
    chunk = ChunkData()
    for i in range(size):
        entry = DoubleEntry(int(time.time()), tag,
                            random.uniform(0, max_float))
        chunk.add_entry(entry)
    return chunk
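A quick usage sketch for generate_data (hypothetical calling code; the encode(use_compression=...) call mirrors Example #7):
# Hypothetical usage of generate_data from Example #2.
chunk = generate_data(tag="temperature", size=10, max_float=50)
print(len(chunk.entries))  # -> 10
encoded = chunk.encode(use_compression=True)  # encode API as used in Example #7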
Example #3
def generate_random_chunk(block_id, key=None, size=1000, min_val=0, max_val=30):
    # Draw the key inside the body: a default of os.urandom(32) is evaluated
    # only once, silently reusing the same "random" key across calls.
    if key is None:
        key = os.urandom(32)
    chunk = ChunkData()
    for i in range(size):
        entry = DoubleEntry(int(time.time()), "test", float(random.uniform(min_val, max_val)))
        chunk.add_entry(entry)

    stream_ident = DataStreamIdentifier(PRIVATE_KEY.public_key().address(), STREAMID, NONCE,
                                        TXID)

    return create_cloud_chunk(stream_ident, block_id, get_priv_key(PRIVATE_KEY), 10, key, chunk)
Example #4
def generate_random_chunk(private_key,
                          block_id,
                          stream_identifier,
                          tag="test",
                          key=None,
                          size=1000,
                          max_float=1000):
    # As in Example #3: default the key inside the body so each call draws fresh randomness.
    if key is None:
        key = os.urandom(32)
    chunk = ChunkData()
    for i in range(size):
        entry = DoubleEntry(int(time.time()), tag,
                            random.uniform(0, max_float))
        chunk.add_entry(entry)
    return create_cloud_chunk(stream_identifier, block_id,
                              get_priv_key(private_key), 0, key, chunk)
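A hedged sketch of calling this variant; the DataStreamIdentifier arguments and the PRIVATE_KEY, STREAMID, NONCE, TXID globals mirror Example #3:
# Hypothetical usage; the globals are the ones referenced in Example #3.
stream_ident = DataStreamIdentifier(PRIVATE_KEY.public_key().address(), STREAMID, NONCE, TXID)
cloud_chunk = generate_random_chunk(PRIVATE_KEY, 1, stream_ident, tag="test", size=100)
encoded = cloud_chunk.encode()  # CloudChunk.encode() as used in Example #7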
Example #5
def extract_eth_smartmeter_data(data_dir, chunk_size, max_entries=None):
    cur_chunk = ChunkData(max_size=chunk_size, entry_type=TYPE_MULTI_DOUBLE_ENTRY)
    num_entries = 0
    for day_file in sorted(os.listdir(data_dir)):
        print(day_file)
        match = pattern_data.match(day_file)
        if match:
            timestamp = date_to_unixstamp(match.group(1))
            with open(os.path.join(data_dir, day_file), 'r') as data_feed:
                for line in data_feed:
                    values = [float(x) for x in line.split(",")]  # materialize floats (map() is lazy in Python 3)
                    data_item = MultiDoubleEntry(timestamp, "sm-h1", values)
                    num_entries += 1
                    if not cur_chunk.add_entry(data_item):
                        yield cur_chunk
                        cur_chunk = ChunkData(max_size=chunk_size, entry_type=TYPE_MULTI_DOUBLE_ENTRY)
                        cur_chunk.add_entry(data_item)
                    # Stop once the requested number of entries is reached.
                    if max_entries is not None and num_entries >= max_entries:
                        break
                    timestamp += 1
        if max_entries is not None and num_entries >= max_entries:
            break
    if len(cur_chunk.entries) > 0:
        yield cur_chunk
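A minimal consumer sketch for this generator (the directory path and sizes are placeholder values):
# Hypothetical consumer of extract_eth_smartmeter_data.
for chunk in extract_eth_smartmeter_data("data/smartmeter", chunk_size=1000, max_entries=10000):
    print(len(chunk.entries), len(chunk.encode(use_compression=False)))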
Example #6
def get_chunk(self, chunk_key, time_keeper=None, do_plain=True):
    # Avoid a TimeKeeper() default: it would be created once and shared across calls.
    if time_keeper is None:
        time_keeper = TimeKeeper()
    time_keeper.start_clock()
    chunk = get_data_s3(self.s3, binascii.hexlify(chunk_key),
                        self.bucket_name)
    if do_plain:
        chunk = ChunkData.decode(chunk)
    else:
        chunk = CloudChunk.decode(chunk)
    time_keeper.stop_clock("time_s3_get_chunk")
    return chunk
Example #7
    def run_loop(self,
                 image_capture,
                 time_file,
                 sym_key="a" * 16,
                 interval=3600):
        while True:
            try:
                timer_chunk = TimeKeeper()
                total_id = timer_chunk.start_clock_unique()
                timestamp_data = int(time.time())
                block_id = (timestamp_data - self.start_time) // interval  # integer block index
                # Take a picture
                picture_name = "%s%d.jpg" % (self.name, block_id)
                image_capture(picture_name)
                print(picture_name)

                # load image
                with open(picture_name, 'rb') as f:  # binary mode for JPEG data
                    picture = f.read()

                chunk_tmp = ChunkData()

                chunk_tmp.add_entry(
                    PictureEntry(timestamp_data,
                                 picture_name,
                                 picture,
                                 time_keeper=timer_chunk))

                cur_time = timer()
                cloud_chunk = self._generate_cloud_chunk(
                    block_id, sym_key, chunk_tmp, timer_chunk)
                chunk_creation = timer() - cur_time

                len_normal = len(chunk_tmp.encode(use_compression=False))
                len_compressed = len(
                    compress_data(chunk_tmp.encode(use_compression=True)))

                cloud_chunk_encoded = cloud_chunk.encode()
                length_final = len(cloud_chunk_encoded)
                cur_time = timer()
                self._store_to_cloud(cloud_chunk_encoded)
                chunk_store = timer() - cur_time

                times = timer_chunk.logged_times

                time_file.write(
                    "%s, %s, %s, %s, %s, %s, %d, %d, %d,\n" %
                    (times['chunk_compression'], times['gcm_encryption'],
                     times['ecdsa_signature'],
                     times['time_lepton_compression'], chunk_creation * 1000,
                     chunk_store * 1000, len_normal, len_compressed,
                     length_final))
                time_file.flush()
                timer_chunk.stop_clock_unique('time_total', total_id)
                # Guard against a negative sleep if a cycle ran longer than the interval.
                time.sleep(max(0, interval -
                               int(timer_chunk.logged_times['time_total'] / 1000)))
            except RuntimeError as e:
                print(e)
                logging.error("Exception occurred: %s" % e)
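The examples only exercise TimeKeeper through start_clock/stop_clock, start_clock_unique/stop_clock_unique and the logged_times dict; the sleep arithmetic above implies logged times in milliseconds. A minimal stand-in consistent with those calls might look like this, though the real implementation may differ:
import time
import uuid

class TimeKeeper(object):
    # Minimal stand-in matching the calls in these examples; the real class may differ.
    def __init__(self):
        self.logged_times = {}  # name -> elapsed time in milliseconds
        self._starts = {}

    def start_clock(self):
        self._starts['_default'] = time.time()

    def stop_clock(self, name):
        self.logged_times[name] = (time.time() - self._starts.pop('_default')) * 1000

    def start_clock_unique(self):
        clock_id = uuid.uuid4().hex  # unique id so overlapping timings don't collide
        self._starts[clock_id] = time.time()
        return clock_id

    def stop_clock_unique(self, name, clock_id):
        self.logged_times[name] = (time.time() - self._starts.pop(clock_id)) * 1000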
Example #8
def extract_eth_plug_data(data_dir, chunk_size, set_id, plug_id):
    cur_chunk = ChunkData(max_size=chunk_size)
    dir_set = os.path.join(data_dir, to_string_id(set_id))
    if os.path.isdir(dir_set):
        files_plug = os.path.join(dir_set, to_string_id(plug_id))
        if os.path.isdir(files_plug):

            for day_file in sorted(os.listdir(files_plug)):
                print(day_file)
                match = pattern_data.match(day_file)
                if match:
                    timestamp = date_to_unixstamp(match.group(1))
                    with open(os.path.join(files_plug, day_file), 'r') as data_feed:
                        for line in data_feed:
                            data_item = DoubleEntry(timestamp, "device-%d-%d" % (set_id, plug_id), float(line))
                            if not cur_chunk.add_entry(data_item):
                                yield cur_chunk
                                cur_chunk = ChunkData(max_size=chunk_size)
                                cur_chunk.add_entry(data_item)
                            timestamp += 1
    if len(cur_chunk.entries) > 0:
        yield cur_chunk