def capsule_side_channel(enacted_policy):
    """Build a signing DataSource for *enacted_policy* and encrypt a greeting.

    Returns the (message_kit, data_source) pair for out-of-band delivery.
    """
    source = DataSource(
        policy_pubkey_enc=enacted_policy.public_key,
        signing_keypair=SigningKeypair(),
    )
    kit, _sig = source.encapsulate_single_message(b"Welcome to the flippering.")
    return kit, source
def encrypt_patient_data(policy_pubkey, data_fields, label: bytes = DEFAULT_LABEL, save_as_file: bool = False):
    """Encrypt the selected patient fields under *policy_pubkey* and publish to IPFS.

    Reads the Merkle-tree record from ``Merkle_json.json``, keeps only the
    requested *data_fields* (value + hash), encrypts them as one message kit,
    optionally writes the payload to PATIENT_DETAIL, and returns the result of
    adding that file to IPFS.
    """
    source = DataSource(policy_pubkey_enc=policy_pubkey, label=label)
    source_pubkey_bytes = bytes(source.stamp)
    ipfs_client = ipfsapi.connect()

    with open("Merkle_json.json", "r") as handle:
        merkle = json.load(handle)

    # Keep only the requested fields: each entry carries its value and
    # its Merkle hash from the first record.
    selected = {
        field: {'Value': merkle[0][field]['Value'],
                'Hash': merkle[0][field]['Hash']}
        for field in data_fields
    }

    kit, _sig = source.encrypt_message(msgpack.dumps(selected, use_bin_type=True))
    payload = {
        'data_source': source_pubkey_bytes,
        'kits': [kit.to_bytes()],
    }

    if save_as_file:
        with open(PATIENT_DETAIL, "wb") as out:
            msgpack.dump(payload, out, use_bin_type=True)
    # NOTE(review): the IPFS add happens even when save_as_file is False,
    # publishing whatever copy of PATIENT_DETAIL is already on disk — confirm
    # that this is intended.
    return ipfs_client.add(PATIENT_DETAIL)
def generate_message(policy_pubkey, msg_data, username, label):
    """Encrypt one chat message for *policy_pubkey* and pack it for transport.

    Returns a msgpack blob holding the sender's DataSource verifying key and a
    single encrypted message kit.
    """
    source = DataSource(policy_pubkey_enc=policy_pubkey, label=label)
    record = {
        'name': username,
        'msg': msg_data,
        'timestamp': time.time(),
    }
    kit, _sig = source.encrypt_message(msgpack.dumps(record, use_bin_type=True))
    envelope = {
        'data_source': bytes(source.stamp),
        'kits': [kit.to_bytes()],
    }
    return msgpack.dumps(envelope, use_bin_type=True)
def capsule_side_channel(enacted_federated_policy):
    """Encrypt a fixed greeting under the federated policy's public key.

    Returns the (message_kit, data_source) pair.
    """
    source = DataSource(
        policy_pubkey_enc=enacted_federated_policy.public_key,
        signing_keypair=SigningKeypair(),
        label=enacted_federated_policy.label,
    )
    kit, _sig = source.encrypt_message(b"Welcome to the flippering.")
    return kit, source
def __init__(self, label, raw_episode, author, n=3, m=2):
    """Self-grant a year-long m-of-n policy for *label* and encrypt *raw_episode*.

    :param label: public search label for the episode
    :param raw_episode: plaintext episode bytes
    :param author: the Alice character issuing the grant
    :param n: number of shares distributed to Ursulas
    :param m: re-encryption threshold
    """
    self.n = n
    self.m = m
    self.label = label
    self.author = author
    # The author grants to herself for a full year; subscribers receive
    # shorter grants separately.
    expiration = maya.now() + datetime.timedelta(days=365)
    self.policy = self.author.grant(self.author, self.label,
                                    m=m, n=n, expiration=expiration)
    self.data_source = DataSource(policy_pubkey_enc=self.policy.public_key,
                                  label=self.label)
    self.data_source_public_key = bytes(self.data_source.stamp)
    self.episode_message_kit, self.episode_signature = \
        self.data_source.encapsulate_single_message(raw_episode)
def deliver_purchase(self, to):
    """Grant the buyer *to* five-day access to this book and encrypt its content.

    :param to: the buyer (wraps a Bob in ``.character``) receiving the grant
    :return: the side-channel tuple the buyer needs to retrieve the book:
             (author verifying key bytes, policy public key,
             data source public key bytes, label, message kit)
    """
    policy_end_datetime = maya.now() + datetime.timedelta(days=5)
    # BUG FIX: the original ignored the `to` parameter and granted to the
    # module-global `first_buyer` via the module-global `author`; grant from
    # this delivery's own author to the actual recipient instead.
    policy = self.author.character.grant(to.character,
                                         self.book.label,
                                         m=m, n=n,
                                         expiration=policy_end_datetime)
    author_pubkey = bytes(self.author.character.stamp)
    # A fresh DataSource encrypts the book content under the new policy.
    data_source = DataSource(policy_pubkey_enc=policy.public_key)
    message_kit, _signature = data_source.encapsulate_single_message(
        self.book.content)
    data_source_public_key = bytes(data_source.stamp)
    return (author_pubkey, policy.public_key, data_source_public_key,
            self.book.label, message_kit)
def generate_heartbeat_data(gen_time, policy_pubkey_hex, last_heart_rate):
    """Generate one simulated heart-rate sample, encrypt it, and store it in the DB.

    :param gen_time: UI click timestamp; 0 means the button was never clicked
    :param policy_pubkey_hex: hex-encoded policy public key to encrypt under
    :param last_heart_rate: previous reading (or None on the first call)
    :return: the new heart rate (int), or None when nothing was generated

    The DataSource is cached in the module-level ``cached_data_source`` list so
    the same signing identity persists across interval ticks; its verifying key
    is written once to DATA_SOURCE_INFO_FILE for the Bob side.
    """
    if int(gen_time) == 0:
        # button has not been clicked as yet or interval triggered before click
        return None
    label = 'heart-data'
    policy_pubkey = UmbralPublicKey.from_bytes(bytes.fromhex(policy_pubkey_hex))
    if not cached_data_source:
        # First call: create the DataSource and publish its verifying key.
        data_source = DataSource(policy_pubkey_enc=policy_pubkey, label=label)
        data_source_public_key = bytes(data_source.stamp)
        data = {
            'data_source_pub_key': data_source_public_key,
        }
        with open(DATA_SOURCE_INFO_FILE, "wb") as file:
            msgpack.dump(data, file, use_bin_type=True)
        cached_data_source.append(data_source)
    else:
        data_source = cached_data_source[0]
    # Random-walk the rate by at most ±5 BPM, clamped to [60, 100].
    if last_heart_rate is not None:
        last_heart_rate = int(last_heart_rate)
    else:
        last_heart_rate = 80
    heart_rate = random.randint(max(60, last_heart_rate - 5),
                                min(100, last_heart_rate + 5))
    plaintext = msgpack.dumps(heart_rate, use_bin_type=True)
    message_kit, _signature = data_source.encrypt_message(plaintext)
    kit_bytes = message_kit.to_bytes()
    timestamp = time.time()
    # One-row frame: timestamp + hex ciphertext, appended to the readings table.
    df = pd.DataFrame.from_dict({
        'Timestamp': [timestamp],
        'EncryptedData': [kit_bytes.hex()],
    })
    # add new heartbeat data
    db_conn = sqlite3.connect(DB_FILE)
    try:
        df.to_sql(name=DB_NAME, con=db_conn, index=False, if_exists='append')
        print("Added heart rate️ measurement to db:", timestamp, "-> ❤", heart_rate)
    finally:
        db_conn.close()
    return heart_rate
class PublicEdekProtectedEpisode(object):
    """Keeps the individual episode label, data and policy configuration.

    Data is encrypted so it's absolutely secure to keep it public

    Attributes:
        label: public unencrypted label used in search on Ursulas
        policy: policy on Ursula (provisione for each episode)
        data_source: the decryption abstraction, see pyUmbral docs
        episode_message_kit: the capsule providing the reencryption
            capabilities, see pyUmbral docs
    """

    def __init__(self, label, raw_episode, author, n=3, m=2):
        """Provision an m-of-n policy for this episode and encrypt *raw_episode*.

        :param label: public search label for the episode
        :param raw_episode: plaintext episode bytes to encrypt
        :param author: the Alice character who issues grants
        :param n: number of shares distributed to Ursulas
        :param m: re-encryption threshold
        """
        self.n = n
        self.m = m
        # The author self-grants for a year; subscribers get short-lived
        # grants via grant() below.
        policy_end_datetime = maya.now() + datetime.timedelta(days=365)
        self.label = label
        self.author = author
        self.policy = self.author.grant(self.author, self.label, m=m, n=n,
                                        expiration=policy_end_datetime)
        self.data_source = DataSource(policy_pubkey_enc=self.policy.public_key,
                                      label=self.label)
        # Verifying key published so Bobs can reconstruct the DataSource.
        self.data_source_public_key = bytes(self.data_source.stamp)
        self.episode_message_kit, self.episode_signature = \
            self.data_source.encapsulate_single_message(raw_episode)

    def grant(self, subscriber, hours=1):
        """Access can be granted on per-subscriber per-episode basis with time
        limits. 1 hour by default which is pretty enough for streaming app"""
        policy_end_datetime = maya.now() + datetime.timedelta(hours=hours)
        self.author.grant(subscriber, self.label, m=self.m, n=self.n,
                          expiration=policy_end_datetime)
        print("DDRM:Stream:Episode: access granted for the subscriber {} for {} hours".format(subscriber, hours))
def decrypting_msg(data, policy_pubkey, label, arjuns_sig_pubkey, mayank):
    """Decrypt the first recoverable message kit in *data* via the network.

    Returns a "name: msg (timestamp)" display string for the first kit that
    decrypts successfully; falls through (returning None) if every kit fails,
    printing a traceback for each failure.
    """
    data = msgpack.loads(data, raw=False)
    print("afterjson", data)
    # Rebuild the sender's DataSource view from its public keys.
    source = DataSource.from_public_keys(
        policy_public_key=policy_pubkey,
        datasource_public_key=data['data_source'],
        label=label,
    )
    for kit in (UmbralMessageKit.from_bytes(k) for k in data['kits']):
        try:
            start = timer()
            cleartexts = mayank.retrieve(
                message_kit=kit,
                data_source=source,
                alice_verifying_key=arjuns_sig_pubkey,
            )
            end = timer()
            body = msgpack.loads(cleartexts[0], raw=False)
            stamp = maya.MayaDT(body['timestamp'])
            return "{}: {} ({})".format(body['name'], body['msg'], str(stamp))
        except Exception:
            traceback.print_exc()
def test_alice_can_decrypt(federated_alice):
    """Alice can decrypt a message encrypted under her own policy key."""
    label = b"boring test label"
    policy_key = federated_alice.get_policy_pubkey_from_label(label)

    encryptor = DataSource(policy_pubkey_enc=policy_key, label=label)
    original = b"boring test message"
    kit, sig = encryptor.encrypt_message(message=original)

    decrypted = federated_alice.verify_from(
        stranger=encryptor,
        message_kit=kit,
        signature=sig,
        decrypt=True,
    )
    assert decrypted == original
def encrypt_for_policy(self, policy_pubkey: UmbralPublicKey, plaintext: bytes):
    """
    Encrypt data for a Policy

    :param policy_pubkey: Policy public key
    :param plaintext: Plaintext bytes to encrypt
    :return: data_source, message_kit, _signature
    """
    # A fresh DataSource is created per encryption call for this policy.
    source = DataSource(policy_pubkey_enc=policy_pubkey)
    kit, sig = source.encapsulate_single_message(plaintext)
    return source, kit, sig
def generate_vehicular_data(gen_time, policy_pubkey_hex, cached_last_reading):
    """Generate one simulated vehicle reading, encrypt it, and append it to the DB.

    :param gen_time: UI click timestamp; 0 means the button was never clicked
    :param policy_pubkey_hex: hex-encoded policy public key to encrypt under
    :param cached_last_reading: previous reading fed to generate_new_reading
    :return: the new reading serialized as JSON, or None when nothing was made

    The DataSource is cached in the module-level ``cached_data_source`` list so
    the same signing identity persists across interval ticks; its verifying key
    is written once to DATA_SOURCE_INFO_FILE for the Bob side.
    """
    if int(gen_time) == 0:
        # button has not been clicked as yet or interval triggered before click
        return None
    label = 'vehicle-data'
    policy_pubkey = UmbralPublicKey.from_bytes(
        bytes.fromhex(policy_pubkey_hex))
    if not cached_data_source:
        # First call: create the DataSource and publish its verifying key.
        data_source = DataSource(policy_pubkey_enc=policy_pubkey, label=label)
        data_source_public_key = bytes(data_source.stamp)
        data = {
            'data_source_pub_key': data_source_public_key,
        }
        with open(DATA_SOURCE_INFO_FILE, "wb") as file:
            msgpack.dump(data, file, use_bin_type=True)
        cached_data_source.append(data_source)
    else:
        data_source = cached_data_source[0]
    # Derive the next simulated reading from the cached previous one.
    latest_reading = generate_new_reading(cached_last_reading)
    plaintext = msgpack.dumps(latest_reading, use_bin_type=True)
    message_kit, _signature = data_source.encrypt_message(plaintext)
    kit_bytes = message_kit.to_bytes()
    timestamp = time.time()
    # One-row frame: timestamp + hex ciphertext, appended to the readings table.
    df = pd.DataFrame.from_dict({
        'Timestamp': [timestamp],
        'EncryptedData': [kit_bytes.hex()],
    })
    # add new vehicle data
    db_conn = sqlite3.connect(DB_FILE)
    try:
        df.to_sql(name=DB_NAME, con=db_conn, index=False, if_exists='append')
        print("Added vehicle sensor readings to db:", timestamp, " -> ",
              latest_reading)
    finally:
        db_conn.close()
    return json.dumps(latest_reading)
def decrypt_for_policy(self, label: bytes, message_kit: UmbralMessageKit,
                       alice_pubkey: UmbralPublicKey,
                       bob_privkey: UmbralPrivateKey,
                       policy_pubkey: UmbralPublicKey,
                       data_source_pubkey: UmbralPublicKey):
    """
    Decrypt data for a Policy

    :param label: A label to represent the policies data
    :param message_kit: UmbralMessageKit
    :param alice_pubkey: Alice's public key
    :param bob_privkey: Bob's private key
    :param policy_pubkey: Policy's public key
    :param data_source_pubkey: DataSource's public key
    :return: The decrypted cleartext
    """
    print('decrypt_for_policy')
    # Initialize Bob from his private key; both powers derive from the same key.
    BOB = Bob(crypto_power_ups=[
        SigningPower(keypair=SigningKeypair(bob_privkey)),
        EncryptingPower(keypair=EncryptingKeypair(bob_privkey))
    ],
              known_nodes=(self.ursula, ),
              federated_only=True,
              always_be_learning=True)
    print('-=-=-=')
    print(label)
    print(bytes(alice_pubkey))
    # Bob joins the policy so that he can receive data shared on it
    BOB.join_policy(
        label,  # The label - he needs to know what data he's after.
        bytes(alice_pubkey),  # To verify the signature, he'll need Alice's public key.
        # verify_sig=True,  # And yes, he usually wants to verify that signature.
        # He can also bootstrap himself onto the network more quickly
        # by providing a list of known nodes at this time.
        node_list=[("localhost", 3601)])
    print('-=-=-=2')
    # Bob needs to reconstruct the DataSource.
    datasource_as_understood_by_bob = DataSource.from_public_keys(
        policy_public_key=policy_pubkey,
        datasource_public_key=bytes(data_source_pubkey),
        label=label)
    print('-=-=-=3')
    # NOTE: Not sure if I am doing something wrong or if this is missing
    # from the serialized bytes
    message_kit.policy_pubkey = policy_pubkey
    # Now Bob can retrieve the original message. He just needs the MessageKit
    # and the DataSource which produced it.
    cleartexts = BOB.retrieve(message_kit=message_kit,
                              data_source=datasource_as_understood_by_bob,
                              alice_verifying_key=alice_pubkey)
    print('-=-=-=4')
    # Only the first cleartext is returned; one kit yields one plaintext here.
    return cleartexts[0]
def generate_heart_rate_samples(policy_pubkey, label: bytes = DEFAULT_LABEL, samples: int = 500, save_as_file: bool = False):
    """Produce *samples* simulated heart-rate readings encrypted under the policy.

    Each reading drifts by at most ±5 BPM from the previous one, clamped to the
    normal resting range (60–100 BPM), with timestamps spaced 3 s apart.
    Returns ``{'data_source': <verifying key bytes>, 'kits': [...]}`` and
    optionally persists it to HEART_DATA_FILENAME as msgpack.
    """
    source = DataSource(policy_pubkey_enc=policy_pubkey, label=label)
    source_pubkey = bytes(source.stamp)

    rate = 80
    tick = time.time()
    encrypted_kits = []
    for _ in range(samples):
        # Simulated heart rate data
        # Normal resting heart rate for adults: between 60 to 100 BPM
        rate = random.randint(max(60, rate - 5), min(100, rate + 5))
        tick += 3
        sample = {
            'heart_rate': rate,
            'timestamp': tick,
        }
        kit, _sig = source.encrypt_message(msgpack.dumps(sample, use_bin_type=True))
        encrypted_kits.append(kit.to_bytes())

    data = {
        'data_source': source_pubkey,
        'kits': encrypted_kits,
    }
    if save_as_file:
        with open(HEART_DATA_FILENAME, "wb") as file:
            msgpack.dump(data, file, use_bin_type=True)
    return data
def retrieve():
    """
    Character control endpoint for re-encrypting and decrypting policy data.

    Expects a JSON body with base64 'label' and 'message_kit' and hex
    'policy_encrypting_pubkey', 'alice_signing_pubkey',
    'datasource_signing_pubkey'. Returns base64 plaintexts on success,
    400 on malformed input.
    """
    try:
        request_data = json.loads(request.data)

        label = b64decode(request_data['label'])
        policy_pubkey_enc = bytes.fromhex(
            request_data['policy_encrypting_pubkey'])
        alice_pubkey_sig = bytes.fromhex(
            request_data['alice_signing_pubkey'])
        datasource_pubkey_sig = bytes.fromhex(
            request_data['datasource_signing_pubkey'])
        message_kit = b64decode(request_data['message_kit'])
    except (KeyError, JSONDecodeError, ValueError) as e:
        # BUG FIX: Response() needs a str/bytes body, not an exception object.
        # ValueError also covers malformed hex (bytes.fromhex) and base64
        # input, which previously escaped as a 500.
        return Response(str(e), status=400)

    policy_pubkey_enc = UmbralPublicKey.from_bytes(policy_pubkey_enc)
    alice_pubkey_sig = UmbralPublicKey.from_bytes(alice_pubkey_sig)
    message_kit = UmbralMessageKit.from_bytes(message_kit)

    # Rebuild the sender's DataSource view, then join the policy and retrieve.
    data_source = DataSource.from_public_keys(policy_pubkey_enc,
                                              datasource_pubkey_sig,
                                              label=label)
    drone_bob.join_policy(label=label, alice_pubkey_sig=alice_pubkey_sig)
    plaintexts = drone_bob.retrieve(message_kit=message_kit,
                                    data_source=data_source,
                                    alice_verifying_key=alice_pubkey_sig)

    plaintexts = [b64encode(plaintext).decode() for plaintext in plaintexts]
    response_data = {
        'result': {
            'plaintext': plaintexts,
        }
    }
    return Response(json.dumps(response_data), status=200)
# But first we need some encrypted data! # Let's read the file produced by the heart monitor and unpack the MessageKits, # which are the individual ciphertexts. #data = msgpack.load(open("patient_details.msgpack", "rb"), raw=False) ipfs_api = ipfsapi.connect('127.0.0.1', 5001) f = open("ipfs.txt", "r") Hashkey = f.read() ipfs_api.get(Hashkey) os.rename(Hashkey, 'patient_details.msgpack') data = msgpack.load(open("patient_details.msgpack", "rb"), raw=False) message_kits = (UmbralMessageKit.from_bytes(k) for k in data['kits']) # The doctor also needs to create a view of the Data Source from its public keys data_source = DataSource.from_public_keys( policy_public_key=policy_pubkey, datasource_public_key=data['data_source'], label=label) # Now he can ask the NuCypher network to get a re-encrypted version of each MessageKit. for message_kit in message_kits: try: start = timer() retrieved_plaintexts = doctor.retrieve( message_kit=message_kit, data_source=data_source, alice_verifying_key=alices_sig_pubkey) end = timer() plaintext = msgpack.loads(retrieved_plaintexts[0], raw=False) # Now we can get the heart rate and the associated timestamp,
# Now that Bob has joined the Policy, let's show how Enrico the Encryptor # can share data with the members of this Policy and then how Bob retrieves it. with open(BOOK_PATH, 'rb') as file: finnegans_wake = file.readlines() print() print("**************James Joyce's Finnegan's Wake**************") print() print("---------------------------------------------------------") for counter, plaintext in enumerate(finnegans_wake): ######################### # Enrico, the Encryptor # ######################### enciro = Enrico(policy_pubkey_enc=policy.public_key) # In this case, the plaintext is a # single passage from James Joyce's Finnegan's Wake. # The matter of whether encryption makes the passage more or less readable # is left to the reader to determine. single_passage_ciphertext, _signature = enciro.encapsulate_single_message( plaintext) data_source_public_key = bytes(enciro.stamp) del enciro ############### # Back to Bob # ############### enrico_as_understood_by_bob = Enrico.from_public_keys(
# Wire up the bookstore demo: the buyer pays the contract, which triggers
# deliver_purchase and hands back the side-channel tuple needed to retrieve
# the encrypted book.
book = Book(author)
first_buyer = Buyer(
    b"First Buyer's ETH account",
    Bob(known_nodes=(URSULA, ),
        federated_only=True,
        known_certificates_dir=CERTIFICATE_DIR))
book_store_delivery = BookStoreDelivery(book)
book_store_contract = BookStoreEthContract(
    book, author, 10, book_store_delivery.deliver_purchase)
# Paying 10 units triggers delivery: keys, label and message kit come back.
author_public_key, policy_public_key, data_source_public_key, label, kit = first_buyer.send_eth_to(
    book_store_contract, 10)
first_buyer.character.join_policy(
    label,  # The label - he needs to know what data he's after.
    bytes(author.character.stamp),  # To verify the signature, he'll need Alice's public key.
    # He can also bootstrap himself onto the network more quickly
    # by providing a list of known nodes at this time.
    node_list=[("localhost", 3601)])
# The buyer reconstructs the DataSource from the public keys alone.
datasource_as_understood_by_bob = DataSource.from_public_keys(
    policy_public_key=policy_public_key,
    datasource_public_key=data_source_public_key,
    label=label)
alice_pubkey_restored_from_ancient_scroll = UmbralPublicKey.from_bytes(
    author_public_key)
delivered_cleartexts = first_buyer.character.retrieve(
    message_kit=kit,
    data_source=datasource_as_understood_by_bob,
    alice_verifying_key=alice_pubkey_restored_from_ancient_scroll)
print(delivered_cleartexts)
################################################################################ # ...here. OK, pay attention again. # Now it's time for... ##################### # Using DataSources # ##################### # Now Alice has set a Policy and Bob has joined it. # You're ready to make some DataSources and encrypt for Bob. # It may also be helpful to imagine that you have multiple Bobs, # multiple Labels, or both. # First we make a DataSource for this policy. data_source = DataSource(policy_pubkey_enc=policy.public_key) # Here's how we generate a MessageKit for the Policy. We also get a signature # here, which can be passed via a side-channel (or posted somewhere public as # testimony) and verified if desired. In this case, the plaintext is a # single passage from James Joyce's Finnegan's Wake. # The matter of whether encryption makes the passage more or less readable # is left to the reader to determine. message_kit, _signature = data_source.encapsulate_single_message(plaintext) # The DataSource will want to be able to be verified by Bob, so it leaves # its Public Key somewhere. data_source_public_key = bytes(data_source.stamp) # It can save the MessageKit somewhere (IPFS, etc) and then it too can # choose to disappear (although it may also opt to continue transmitting
def test_bob_joins_policy_and_retrieves(federated_alice,
                                        federated_ursulas,
                                        certificates_tempdir,
                                        ):
    """End-to-end: Bob joins a policy, learns the network, retrieves a message,
    and is blocked after Alice revokes the policy."""
    # Let's partition Ursulas in two parts
    a_couple_of_ursulas = list(federated_ursulas)[:2]
    rest_of_ursulas = list(federated_ursulas)[2:]
    # Bob becomes
    bob = Bob(federated_only=True,
              start_learning_now=True,
              network_middleware=MockRestMiddleware(),
              abort_on_learning_error=True,
              known_nodes=a_couple_of_ursulas,
              )
    # Bob only knows a couple of Ursulas initially
    assert len(bob.known_nodes) == 2
    # Alice creates a policy granting access to Bob
    # Just for fun, let's assume she distributes KFrags among Ursulas unknown to Bob
    n = NUMBER_OF_URSULAS_IN_DEVELOPMENT_NETWORK - 2
    label = b'label://' + os.urandom(32)
    contract_end_datetime = maya.now() + datetime.timedelta(days=5)
    policy = federated_alice.grant(bob=bob,
                                   label=label,
                                   m=3,
                                   n=n,
                                   expiration=contract_end_datetime,
                                   handpicked_ursulas=set(rest_of_ursulas),
                                   )
    assert bob == policy.bob
    assert label == policy.label
    # Now, Bob joins the policy
    bob.join_policy(label=label,
                    alice_pubkey_sig=federated_alice.stamp,
                    )
    # In the end, Bob should know all the Ursulas
    assert len(bob.known_nodes) == len(federated_ursulas)
    # DataSource becomes
    data_source = DataSource(policy_pubkey_enc=policy.public_key,
                             signing_keypair=SigningKeypair(),
                             label=label)
    plaintext = b"What's your approach? Mississippis or what?"
    message_kit, _signature = data_source.encrypt_message(plaintext)
    alices_verifying_key = federated_alice.stamp.as_umbral_pubkey()
    # Bob takes the message_kit and retrieves the message within
    delivered_cleartexts = bob.retrieve(message_kit=message_kit,
                                        data_source=data_source,
                                        alice_verifying_key=alices_verifying_key)
    assert plaintext == delivered_cleartexts[0]
    # Let's try retrieve again, but Alice revoked the policy.
    failed_revocations = federated_alice.revoke(policy)
    assert len(failed_revocations) == 0
    # With the policy revoked, retrieval must fail for lack of Ursulas.
    with pytest.raises(Ursula.NotEnoughUrsulas):
        _cleartexts = bob.retrieve(message_kit=message_kit,
                                   data_source=data_source,
                                   alice_verifying_key=alices_verifying_key)
def update_cached_decrypted_measurements_list(read_time, df_json_latest_measurements, bob_id):
    """Decrypt new vehicle readings from the DB and merge them into the cache.

    :param read_time: UI click timestamp; 0 means the button was never clicked
    :param df_json_latest_measurements: JSON of the previously cached DataFrame
        (or None / ACCESS_REVOKED sentinel)
    :param bob_id: unused here; identifies the reader in the UI callback
    :return: updated cache as JSON (at most the 30 most recent readings),
        or None when nothing was read
    """
    if int(read_time) == 0:
        # button never clicked but triggered by interval
        return None
    # Let's join the policy generated by Alicia. We just need some info about it.
    with open(POLICY_INFO_FILE, 'r') as f:
        policy_data = json.load(f)
    policy_pubkey = UmbralPublicKey.from_bytes(
        bytes.fromhex(policy_data['policy_pubkey']))
    alices_sig_pubkey = UmbralPublicKey.from_bytes(
        bytes.fromhex(policy_data['alice_sig_pubkey']))
    label = policy_data['label'].encode()
    # Join the policy only once; `joined` is a module-level cache list.
    if not joined:
        print("The Insurer joins policy for label '{}' "
              "and pubkey {}".format(policy_data['label'],
                                     policy_data['policy_pubkey']))
        bob.join_policy(label, alices_sig_pubkey)
        joined.append(label)
    with open(DATA_SOURCE_INFO_FILE, "rb") as file:
        data_source_metadata = msgpack.load(file, raw=False)
    df = pd.DataFrame()
    last_timestamp = time.time() - 5  # last 5s
    if (df_json_latest_measurements is not None) and (df_json_latest_measurements != ACCESS_REVOKED):
        df = pd.read_json(df_json_latest_measurements, convert_dates=False)
        if len(df) > 0:
            # sort readings and order by timestamp
            df = df.sort_values(by='timestamp')
            # use last timestamp
            last_timestamp = df['timestamp'].iloc[-1]
    # Bob also needs to create a view of the Data Source from its public keys
    data_source = DataSource.from_public_keys(
        policy_public_key=policy_pubkey,
        datasource_public_key=data_source_metadata['data_source_pub_key'],
        label=label)
    db_conn = sqlite3.connect(DB_FILE)
    try:
        # Pull only rows newer than the newest cached reading, capped at 30.
        encrypted_df_readings = pd.read_sql_query(
            'SELECT Timestamp, EncryptedData '
            'FROM {} '
            'WHERE Timestamp > "{}" '
            'ORDER BY Timestamp '
            'LIMIT 30;'.format(DB_NAME, last_timestamp), db_conn)
        for index, row in encrypted_df_readings.iterrows():
            kit_bytes = bytes.fromhex(row['EncryptedData'])
            message_kit = UmbralMessageKit.from_bytes(kit_bytes)
            # Now he can ask the NuCypher network to get a re-encrypted version of each MessageKit.
            try:
                retrieved_plaintexts = bob.retrieve(
                    message_kit=message_kit,
                    data_source=data_source,
                    alice_verifying_key=alices_sig_pubkey)
                plaintext = msgpack.loads(retrieved_plaintexts[0], raw=False)
            except Exception as e:
                # Best-effort: skip kits that fail to decrypt (e.g. revoked).
                print(str(e))
                continue
            readings = plaintext['carInfo']
            readings['timestamp'] = row['Timestamp']
            df = df.append(readings, ignore_index=True)
    finally:
        db_conn.close()
    # only cache last 30 readings
    rows_to_remove = len(df) - 30
    if rows_to_remove > 0:
        df = df.iloc[rows_to_remove:]
    return df.to_json()
def doctor_decrypt(hash_key):
    """Fetch encrypted patient data from IPFS and decrypt it as the Doctor (Bob).

    :param hash_key: IPFS hash of the msgpack payload produced by the encryptor
    :return: list of decrypted plaintext records (also written to details.json)
    """
    globalLogPublisher.addObserver(SimpleObserver())
    SEEDNODE_URL = 'localhost:11501'
    TEMP_DOCTOR_DIR = "{}/doctor-files".format(
        os.path.dirname(os.path.abspath(__file__)))
    # Start from a clean slate of doctor metadata.
    shutil.rmtree(TEMP_DOCTOR_DIR, ignore_errors=True)
    ursula = Ursula.from_seed_and_stake_info(seed_uri=SEEDNODE_URL,
                                             federated_only=True,
                                             minimum_stake=0)
    from doctor_keys import get_doctor_privkeys
    # Build Bob's crypto powers from the doctor's stored private keys.
    doctor_keys = get_doctor_privkeys()
    bob_enc_keypair = DecryptingKeypair(private_key=doctor_keys["enc"])
    bob_sig_keypair = SigningKeypair(private_key=doctor_keys["sig"])
    enc_power = DecryptingPower(keypair=bob_enc_keypair)
    sig_power = SigningPower(keypair=bob_sig_keypair)
    power_ups = [enc_power, sig_power]
    print("Creating the Doctor ...")
    doctor = Bob(
        is_me=True,
        federated_only=True,
        crypto_power_ups=power_ups,
        start_learning_now=True,
        abort_on_learning_error=True,
        known_nodes=[ursula],
        save_metadata=False,
        network_middleware=RestMiddleware(),
    )
    print("Doctor = ", doctor)
    # Load the policy metadata published by Alicia.
    with open("policy-metadata.json", 'r') as f:
        policy_data = json.load(f)
    policy_pubkey = UmbralPublicKey.from_bytes(
        bytes.fromhex(policy_data["policy_pubkey"]))
    alices_sig_pubkey = UmbralPublicKey.from_bytes(
        bytes.fromhex(policy_data["alice_sig_pubkey"]))
    label = policy_data["label"].encode()
    print("The Doctor joins policy for label '{}'".format(
        label.decode("utf-8")))
    doctor.join_policy(label, alices_sig_pubkey)
    # Fetch the encrypted payload from IPFS and unpack the message kits.
    ipfs_api = ipfsapi.connect()
    file = ipfs_api.get(hash_key)
    print(file)
    os.rename(hash_key, 'patient_details.msgpack')
    data = msgpack.load(open("patient_details.msgpack", "rb"), raw=False)
    message_kits = (UmbralMessageKit.from_bytes(k) for k in data['kits'])
    # Rebuild the sender's DataSource view from its public keys.
    data_source = DataSource.from_public_keys(
        policy_public_key=policy_pubkey,
        datasource_public_key=data['data_source'],
        label=label)
    complete_message = []
    for message_kit in message_kits:
        print(message_kit)
        try:
            start = timer()
            retrieved_plaintexts = doctor.retrieve(
                message_kit=message_kit,
                data_source=data_source,
                alice_verifying_key=alices_sig_pubkey)
            end = timer()
            plaintext = msgpack.loads(retrieved_plaintexts[0], raw=False)
            complete_message.append(plaintext)
            print(plaintext)
            #with open("details.json", "w") as write_file:
            #    json.dump(plaintext, write_file)
        except Exception as e:
            # Best-effort: report the failure and continue with remaining kits.
            traceback.print_exc()
    with open("details.json", "w") as write_file:
        json.dump(complete_message, write_file)
    return complete_message
def update_cached_decrypted_heartbeats_list(read_time, json_latest_values, bob_id):
    """Decrypt new heartbeat readings from the DB and merge them into the cache.

    :param read_time: UI click timestamp; 0 means the button was never clicked
    :param json_latest_values: JSON of the previously cached ordered mapping
        timestamp -> heart rate (or None / ACCESS_REVOKED sentinel)
    :param bob_id: unused here; identifies the reader in the UI callback
    :return: updated cache as JSON (at most 30 entries), or None when
        nothing was read
    """
    if int(read_time) == 0:
        # button never clicked but triggered by interval
        return None
    # Let's join the policy generated by Alicia. We just need some info about it.
    with open(POLICY_INFO_FILE, 'r') as f:
        policy_data = json.load(f)
    policy_pubkey = UmbralPublicKey.from_bytes(
        bytes.fromhex(policy_data['policy_pubkey']))
    alices_sig_pubkey = UmbralPublicKey.from_bytes(
        bytes.fromhex(policy_data['alice_sig_pubkey']))
    label = policy_data['label'].encode()
    # Join the policy only once; `joined` is a module-level cache list.
    if not joined:
        print("The Doctor joins policy for label '{}' "
              "and pubkey {}".format(policy_data['label'],
                                     policy_data['policy_pubkey']))
        bob.join_policy(label, alices_sig_pubkey)
        joined.append(label)
    with open(DATA_SOURCE_INFO_FILE, "rb") as file:
        data_source_metadata = msgpack.load(file, raw=False)
    # Ordered mapping keeps readings in insertion (time) order.
    cached_hb_values = collections.OrderedDict()
    if (json_latest_values is not None) and (json_latest_values != ACCESS_REVOKED):
        cached_hb_values = json.loads(
            json_latest_values, object_pairs_hook=collections.OrderedDict)
    last_timestamp = time.time() - 5  # last 5s
    if len(cached_hb_values) > 0:
        # use last timestamp
        last_timestamp = list(cached_hb_values.keys())[-1]
    # Bob also needs to create a view of the Data Source from its public keys
    data_source = DataSource.from_public_keys(
        policy_public_key=policy_pubkey,
        datasource_public_key=data_source_metadata['data_source_pub_key'],
        label=label)
    db_conn = sqlite3.connect(DB_FILE)
    try:
        # Pull only rows newer than the newest cached reading.
        df = pd.read_sql_query(
            'SELECT Timestamp, EncryptedData '
            'FROM {} '
            'WHERE Timestamp > "{}" '
            'ORDER BY Timestamp;'.format(DB_NAME, last_timestamp), db_conn)
        for index, row in df.iterrows():
            kit_bytes = bytes.fromhex(row['EncryptedData'])
            message_kit = UmbralMessageKit.from_bytes(kit_bytes)
            # Now he can ask the NuCypher network to get a re-encrypted version of each MessageKit.
            try:
                retrieved_plaintexts = bob.retrieve(
                    message_kit=message_kit,
                    data_source=data_source,
                    alice_verifying_key=alices_sig_pubkey)
                hb = msgpack.loads(retrieved_plaintexts[0], raw=False)
            except Exception as e:
                # Best-effort: skip kits that fail to decrypt (e.g. revoked).
                print(str(e))
                continue
            timestamp = row['Timestamp']
            cached_hb_values[timestamp] = hb
    finally:
        db_conn.close()
    # only cache last 30s
    while len(cached_hb_values) > 30:
        cached_hb_values.popitem(False)
    return json.dumps(cached_hb_values)
def reproduce_stored_session(policy_pubkey_bytes: bytes,
                             label: bytes = DEFAULT_LABEL,
                             save_as_file: bool = False,
                             send_by_mqtt: bool = False,
                             store_in_db: bool = False):
    """Replay a recorded car session, encrypting each OBD/GPS sample under the policy.

    :param policy_pubkey_bytes: serialized policy public key to encrypt under
    :param label: policy label for the DataSource
    :param save_as_file: also write the full payload to DATA_FILENAME (msgpack)
    :param send_by_mqtt: publish keys and each kit to the MQTT broker
    :param store_in_db: append each encrypted sample to the readings table
    :return: the last sample as JSON

    NOTE(review): if the session contains no samples, `car_data` is unbound at
    the final json.dumps and this raises NameError — confirm callers guarantee
    a non-empty session.
    """
    policy_pubkey = UmbralPublicKey.from_bytes(policy_pubkey_bytes)
    data_source = DataSource(policy_pubkey_enc=policy_pubkey, label=label)
    data_source_public_key = bytes(data_source.stamp)
    # path of session database file
    sessionPath = RECORDED_CAR_SESSION
    if send_by_mqtt:
        # Connect to MQTT platform
        client = mqtt.Client()
        client.username_pw_set(MQTT_USERNAME, MQTT_PASSWD)
        client.connect(MQTT_HOST, MQTT_PORT, 60)
        # Communication is starting: send public key
        # NOTE(review): `pub_key_bytes` is not defined in this function —
        # presumably a module-level name; verify, or this raises NameError.
        client.publish("/Alicia_Car_Data/public_key", pub_key_bytes)
        client.publish("/Alicia_Car_Data/data_source_public_key",
                       data_source_public_key)
    # Message kits list
    kits = list()
    try:
        # Connection to saved session database
        db = sqlite3.connect(sessionPath)
        tripCurs = db.cursor()
        gpsCurs = db.cursor()
        obdCurs = db.cursor()
        beacons_dataCurs = db.cursor()
        # Data Base cursor for beacons data table
        beacons_dataCurs.execute("SELECT * FROM beacons_data")
        # take the first beacon data row from table
        beacons_dataRow = beacons_dataCurs.fetchone()
        # everytime that engine stop and start during session saving, new trip is created
        for trip in tripCurs.execute("SELECT * FROM trip"):
            start = trip[1]
            end = trip[2]
            nextTime = None
            for gpsRow in gpsCurs.execute(
                    "SELECT * FROM gps WHERE time>=(?) AND time<(?)",
                    (start, end)):
                # if this is not the first iteration...
                if nextTime != None:
                    currentTime = nextTime
                    nextTime = gpsRow[6]
                    # time difference between two samples
                    diff = nextTime - currentTime
                    # sleep the thread: simulating gps signal delay
                    #time.sleep(0.01)
                    # take the same sample from obd table
                    obdCurs.execute("SELECT * FROM obd WHERE time=(?)",
                                    (currentTime, ))
                    obdRow = obdCurs.fetchone()
                    # obtained information about OBDII & GPS from sessions database
                    temp = int(obdRow[0])
                    rpm = int(obdRow[1])
                    vss = int(obdRow[2])
                    maf = obdRow[3]
                    throttlepos = obdRow[4]
                    lat = gpsRow[0]
                    lon = gpsRow[1]
                    alt = gpsRow[2]
                    gpsSpeed = gpsRow[3]
                    course = int(gpsRow[4])
                    gpsTime = int(gpsRow[5])
                    # structure for generating msgpack
                    car_data = {
                        "carInfo": {
                            "engineOn": True,
                            "temp": temp,
                            "rpm": rpm,
                            "vss": vss,
                            "maf": maf,
                            "throttlepos": throttlepos,
                            "lat": lat,
                            "lon": lon,
                            "alt": alt,
                            "gpsSpeed": gpsSpeed,
                            "course": course,
                            "gpsTime": gpsTime
                        }
                    }
                    plaintext = msgpack.dumps(car_data, use_bin_type=True)
                    message_kit, _signature = data_source.encrypt_message(
                        plaintext)
                    kit_bytes = message_kit.to_bytes()
                    kits.append(kit_bytes)
                    if send_by_mqtt:
                        client.publish("/Alicia_Car_Data", kit_bytes)
                    if store_in_db:
                        df = pd.DataFrame.from_dict({
                            'Timestamp': [time.time()],
                            'EncryptedData': [kit_bytes.hex()],
                        })
                        # add new vehicle data
                        db_conn = sqlite3.connect(DB_FILE)
                        df.to_sql(name=DB_NAME, con=db_conn, index=False,
                                  if_exists='append')
                        print('Added vehicle sensor readings to db: ', car_data)
                else:
                    # First row of the trip: just seed nextTime.
                    nextTime = gpsRow[6]
    finally:
        # NOTE(review): if sqlite3.connect raised, `db` is unbound here and
        # this check itself raises NameError — consider `db = None` up front.
        if db:
            db.close()
    if send_by_mqtt:
        client.publish("/Alicia_Car_Data/end", "end")
    data = {
        'data_source': data_source_public_key,
        'kits': kits,
    }
    if save_as_file:
        with open(DATA_FILENAME, "wb") as file:
            msgpack.dump(data, file, use_bin_type=True)
    data_json = json.dumps(car_data)
    return data_json