def generate_data(tag="test", size=1000, max_float=1000, time_keeper=None):
    """Build a ChunkData filled with random DoubleEntry values.

    Args:
        tag: label attached to every generated entry.
        size: number of entries to generate.
        max_float: upper bound for random.uniform(0, max_float).
        time_keeper: optional TimeKeeper instance. The original default was
            `TimeKeeper()`, which Python evaluates once at definition time and
            shares across all calls (mutable-default pitfall); a None sentinel
            preserves the interface without the shared instance. The parameter
            is not used in this body — kept for backward compatibility.

    Returns:
        A ChunkData containing `size` entries stamped with the current unix
        time (int(time.time()) at generation moment).
    """
    # NOTE(review): time_keeper is never read below — presumably used by an
    # earlier revision or by callers relying on the signature; confirm before
    # removing it.
    chunk = ChunkData()
    for _ in range(size):
        entry = DoubleEntry(int(time.time()), tag, random.uniform(0, max_float))
        chunk.add_entry(entry)
    return chunk
def generate_random_chunk(block_id, key=None, size=1000, min_val=0, max_val=30):
    """Create a cloud chunk of random "test" entries for the given block id.

    Bug fixed: the original signature used `key=os.urandom(32)`, which Python
    evaluates exactly once at definition time — every call without an explicit
    key silently reused the SAME key. A None sentinel now yields a fresh
    32-byte key per call, preserving the interface for callers that pass a key.

    NOTE(review): a second `generate_random_chunk` with a different signature
    is defined later in this file and shadows this one at import time — verify
    which definition callers actually get.

    Args:
        block_id: block identifier forwarded to create_cloud_chunk.
        key: 32-byte symmetric key; freshly generated when None.
        size: number of random entries to place in the chunk.
        min_val: lower bound for random.uniform.
        max_val: upper bound for random.uniform.

    Returns:
        The cloud chunk produced by create_cloud_chunk for a stream identity
        derived from the module-level PRIVATE_KEY / STREAMID / NONCE / TXID.
    """
    if key is None:
        key = os.urandom(32)
    chunk = ChunkData()
    for _ in range(size):
        entry = DoubleEntry(int(time.time()), "test", float(random.uniform(min_val, max_val)))
        chunk.add_entry(entry)
    stream_ident = DataStreamIdentifier(PRIVATE_KEY.public_key().address(), STREAMID, NONCE, TXID)
    # 10 here is positional; presumably a key-version or threshold argument of
    # create_cloud_chunk — confirm against its definition.
    return create_cloud_chunk(stream_ident, block_id, get_priv_key(PRIVATE_KEY), 10, key, chunk)
def generate_random_chunk(private_key, block_id, stream_identifier, tag="test", key=os.urandom(32), size=1000, max_float=1000):
    """Create a cloud chunk of random entries for an explicit stream identity.

    Unlike the other `generate_random_chunk` in this file, this variant takes
    the private key and stream identifier as parameters instead of reading
    module-level constants. Being defined later, it shadows that earlier
    definition at import time.

    Bug fixed internally: `key=os.urandom(32)` in the signature is evaluated
    once at definition time, so every keyless call reused one shared key. The
    default expression is kept in the signature for byte-compatibility of the
    interface, but the body now regenerates a fresh key whenever the shared
    default object is received.

    Args:
        private_key: key whose get_priv_key() form signs the cloud chunk.
        block_id: block identifier forwarded to create_cloud_chunk.
        stream_identifier: DataStreamIdentifier for the target stream.
        tag: label attached to every generated entry.
        key: 32-byte symmetric key; a fresh one is drawn per call unless the
            caller passes one explicitly.
        size: number of random entries to place in the chunk.
        max_float: upper bound for random.uniform(0, max_float).

    Returns:
        The cloud chunk produced by create_cloud_chunk (key version 0).
    """
    # The module-level default key object is shared across calls; replace it
    # with fresh randomness so two keyless calls never encrypt under the same key.
    if key is generate_random_chunk.__defaults__[1]:
        key = os.urandom(32)
    chunk = ChunkData()
    for _ in range(size):
        entry = DoubleEntry(int(time.time()), tag, random.uniform(0, max_float))
        chunk.add_entry(entry)
    return create_cloud_chunk(stream_identifier, block_id, get_priv_key(private_key), 0, key, chunk)
def extract_eth_plug_data(data_dir, chunk_size, set_id, plug_id):
    """Generator yielding ChunkData chunks read from per-day ETH plug files.

    Bug fixed: the original used the Python 2 print STATEMENT (`print day_file`),
    which is a syntax error under Python 3; `print(day_file)` behaves
    identically on both interpreters.

    Walks data_dir/<set_id>/<plug_id>/ (ids rendered via to_string_id), and for
    each file whose name matches `pattern_data`, reads one float per line and
    packs them into DoubleEntry objects tagged "device-<set>-<plug>". Entries
    are stamped with consecutive unix timestamps starting from the file's date
    (date_to_unixstamp of the matched group, +1 per line). A chunk is yielded
    each time it refuses a new entry (add_entry returning falsy), and a final
    partial chunk is yielded at the end if non-empty.

    Args:
        data_dir: root directory of the data set.
        chunk_size: max_size passed to each ChunkData.
        set_id: numeric set identifier (subdirectory selector).
        plug_id: numeric plug identifier (subdirectory selector).

    Yields:
        ChunkData instances containing at most chunk_size entries each.
    """
    cur_chunk = ChunkData(max_size=chunk_size)
    dir_set = os.path.join(data_dir, to_string_id(set_id))
    # Guard clauses: missing directories mean there is nothing to yield.
    if not os.path.isdir(dir_set):
        return
    files_plug = os.path.join(dir_set, to_string_id(plug_id))
    if not os.path.isdir(files_plug):
        return
    # sorted() gives deterministic day order regardless of filesystem order.
    for day_file in sorted(os.listdir(files_plug)):
        print(day_file)  # progress trace; presumably debug output — confirm before removing
        match = pattern_data.match(day_file)
        if not match:
            continue
        timestamp = date_to_unixstamp(match.group(1))
        with open(os.path.join(files_plug, day_file), 'r') as data_feed:
            for line in data_feed:
                data_item = DoubleEntry(timestamp, "device-%d-%d" % (set_id, plug_id), float(line))
                if not cur_chunk.add_entry(data_item):
                    # Chunk is full: hand it off and start a new one with the
                    # entry that did not fit.
                    yield cur_chunk
                    cur_chunk = ChunkData(max_size=chunk_size)
                    cur_chunk.add_entry(data_item)
                timestamp += 1
    # Flush the trailing partial chunk, if any entries accumulated.
    if len(cur_chunk.entries) > 0:
        yield cur_chunk