def sg_batch_encrypt_secrets(secrets_to_encrypt, seed_pub_hash_hex, sg_api_token):
    '''
    Batch method for sg_encrypt_secret.

    Local encryption takes longer the longer your data to encrypt is.
    If you're encrypting a large document, batching will not really speed up
    the whole process (and complicates your code). If you're encrypting lots
    of small data points (say SSNs), then batching your API calls in groups
    of 5-500 can have large performance increases.

    Returns a list of storable strings (one per input secret), each of the
    form ``prefix$encoding$seed@nonce$b64_iv_and_ciphertext``.
    '''
    assert sg_api_token, 'sg_api_token required'

    is_valid, err_msg = is_valid_seed_hex(seed_pub_hash_hex)
    if not is_valid:
        raise Exception('Invalid `seed_pub_hash_hex`: %s' % err_msg)

    # isinstance is the idiomatic check and also accepts list/tuple subclasses
    # (was: type(...) in (list, tuple)).
    assert isinstance(secrets_to_encrypt, (list, tuple)), "secrets_to_encrypt must be a list or tuple"
    # FIX: was `< 1000`, which silently capped the batch at 999 even though
    # the message promises a maximum of 1000.
    assert len(secrets_to_encrypt) <= 1000, "Max of 1000 secrets to encrypt"

    # Fetch one key/nonce entry per secret from the API.
    api_response = get_encryption_info(
        seed_pub_hash=seed_pub_hash_hex,
        api_token=sg_api_token,
        num_keys=len(secrets_to_encrypt),
        version='v1',
    )
    if 'error' in api_response:
        raise Exception(api_response['error'])
    if 'errors' in api_response:
        raise Exception(api_response['errors'])

    to_store_list = []
    # zip pairs each API key/nonce entry with its secret directly
    # (was: enumerate + manual indexing into secrets_to_encrypt).
    for obj, secret_message in zip(api_response, secrets_to_encrypt):
        seed_and_nonce = '%s@%s' % (seed_pub_hash_hex, obj['nonce'])
        to_store = encrypt(
            secret_message=secret_message,
            key=obj['key'][:KEY_LENGTH_IN_BYTES],
            iv=None,
        )
        prefix, encoding, b64_iv_and_ciphertext = to_store.split('$')
        # Embed seed_pub_hash_hex & nonce so the stored blob carries
        # everything needed to locate the key material at decryption time.
        to_store_with_sg_data = '$'.join(
            (
                prefix,
                encoding,
                seed_and_nonce,  # added
                b64_iv_and_ciphertext,
            )
        )
        to_store_list.append(to_store_with_sg_data)
    return to_store_list
def test_encryption(self):
    """Round-trip check: decrypt(encrypt(m, k), k) recovers m for every
    combination of bytes/str key and bytes/str message fixtures."""
    keys = (self.byte_key, self.str_key)
    messages = (self.byte_message, self.str_message)
    for key in keys:
        for plaintext in messages:
            ciphertext = encrypt(plaintext, key)
            assert decrypt(ciphertext, key) == plaintext