def test_null_char_in_blob(cql, table1):
    p = random_string()
    v = random_bytes() + bytes([0]) + random_bytes()
    # sanity check: verify that Python actually put the null in the blob...
    assert 0 in v
    stmt = cql.prepare(f'INSERT INTO {table1} (p, b) VALUES (?, ?)')
    cql.execute(stmt, [p, v])
    assert v == cql.execute(f"SELECT b FROM {table1} WHERE p='{p}'").one().b
def encryption_oracle(msg_b):
    msg_b = (util.random_bytes(random.randrange(5, 11)) +
             msg_b +
             util.random_bytes(random.randrange(5, 11)))
    key_b = util.random_bytes(16)
    if random.getrandbits(1):
        # use ecb
        return (util.aes_ecb_enc(util.pkcs7pad(msg_b, 16), key_b), True)
    else:
        # use cbc
        return (util.cbc_enc(msg_b, key_b, iv=util.random_bytes(16)), False)
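
# Hedged usage sketch (not part of the original oracle): under ECB, identical
# plaintext blocks encrypt to identical ciphertext blocks, so a long run of
# 'A's exposes the mode. guess_is_ecb is a hypothetical helper name; the call
# below assumes util.cbc_enc pads internally, as the oracle above implies.
def guess_is_ecb(ct_b, blocksize=16):
    blocks = [ct_b[i:i + blocksize] for i in range(0, len(ct_b), blocksize)]
    return len(blocks) != len(set(blocks))


ct_b, used_ecb = encryption_oracle(b'A' * 64)
print('guessed ECB:', guess_is_ecb(ct_b), '- oracle actually used ECB:', used_ecb)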
def test_scan_paging_bytes(test_table_b):
    # We will not Scan the entire table - we have no idea what it contains.
    # But we don't need to scan the entire table - we just need the table
    # to contain at least two items, and then Scan it with Limit=1 and stop
    # after one page. Before #7768 was fixed, the test failed when the
    # LastEvaluatedKey in the response could not be parsed.
    items = [{'p': random_bytes()}, {'p': random_bytes()}]
    with test_table_b.batch_writer() as batch:
        for item in items:
            batch.put_item(item)
    response = test_table_b.scan(ConsistentRead=True, Limit=1)
    assert 'LastEvaluatedKey' in response
def test_null_in_bytes(test_table_b):
    p = random_bytes() + bytes([0]) + random_bytes()
    val = random_bytes() + bytes([0]) + random_bytes()
    # sanity check: verify that Python actually put the null in the bytes...
    assert 0 in p
    assert 0 in val
    test_table_b.put_item(Item={'p': p, 'val': val})
    assert test_table_b.get_item(Key={'p': p}, ConsistentRead=True)['Item'] == {
        'p': p, 'val': val
    }
def encryption_oracle(plaintext: bytes):
    key = random_AES_key()
    pre = random_bytes(random.randint(5, 10))
    post = random_bytes(random.randint(5, 10))
    text = pre + plaintext + post
    padded = pkcs7_padding(text, 16)
    if random.random() < 0.5:
        return cbc_encrypt(key, padded)
    else:
        return ecb_encrypt(key, padded)
def host():
    email, A = yield []
    password = USERS[email]
    salt = random_bytes(16)
    x = hash_to_int(salt + password)
    v = mod(G, x)
    b = dh_secret(P)
    B = mod(G, b)
    u = random_int_from_n_bytes(128 // 8)
    user_mac, *_ = yield [salt, B, u]
    S = mod(A * mod(v, u), b)
    key = hashlib.sha256(int_to_bytes(S)).digest()
    host_mac = hmac_sha256(key, salt)
    print("=" * 80)
    print("{} trying to log in".format(email.decode()))
    print("host: A:", A)
    print("host: B:", B)
    print("host: salt:", bytes_to_int(salt))
    print("host: u", u)
    print("host: x:", x)
    print("host: S:", S)
    print("host: key:", binascii.hexlify(key))
    yield ["OK" if user_mac == host_mac else "NO"]
def host():
    email, A = yield []
    password = USERS[email]
    salt = random_bytes(16)
    x = hash_to_int(salt + password)
    print("s: x:", x)
    v = mod(G, x)
    b = dh_secret(P)
    B = K * v + mod(G, b)
    print("{} trying to log in".format(email.decode()))
    u = hash_to_int(int_to_bytes(A) + int_to_bytes(B))
    print("host: A:", A)
    print("host: B:", B)
    print("host: salt:", bytes_to_int(salt))
    print("host: u", u)
    print("host: x:", x)
    user_mac, *_ = yield [salt, B]
    t0 = A * mod(v, u)
    S = mod(t0, b)
    print("host: S:", S)
    key = hashlib.sha256(int_to_bytes(S)).digest()
    print("host: key:", binascii.hexlify(key))
    host_mac = hmac_sha256(key, salt)
    yield ["OK" if user_mac == host_mac else "NO"]
def test_large_blob_hash_key(test_table_b):
    b = random_bytes(2048)
    test_table_b.put_item(Item={'p': b})
    assert test_table_b.get_item(Key={'p': b}, ConsistentRead=True)['Item'] == {
        'p': b
    }
def test_put_item_wrong_key_type(test_table):
    b = random_bytes()
    s = random_string()
    n = Decimal("3.14")
    # Should succeed (correct key types)
    test_table.put_item(Item={'p': s, 'c': s})
    assert test_table.get_item(Key={'p': s, 'c': s}, ConsistentRead=True)['Item'] == {
        'p': s, 'c': s
    }
    # Should fail (incorrect hash key types)
    with pytest.raises(ClientError, match='ValidationException'):
        test_table.put_item(Item={'p': b, 'c': s})
    with pytest.raises(ClientError, match='ValidationException'):
        test_table.put_item(Item={'p': n, 'c': s})
    # Should fail (incorrect sort key types)
    with pytest.raises(ClientError, match='ValidationException'):
        test_table.put_item(Item={'p': s, 'c': b})
    with pytest.raises(ClientError, match='ValidationException'):
        test_table.put_item(Item={'p': s, 'c': n})
    # Should fail (missing hash key)
    with pytest.raises(ClientError, match='ValidationException'):
        test_table.put_item(Item={'c': s})
    # Should fail (missing sort key)
    with pytest.raises(ClientError, match='ValidationException'):
        test_table.put_item(Item={'p': s})
def test_update_item_wrong_key_type(test_table, test_table_s):
    b = random_bytes()
    s = random_string()
    n = Decimal("3.14")
    # Should succeed (correct key types)
    test_table.update_item(Key={'p': s, 'c': s}, AttributeUpdates={})
    assert test_table.get_item(Key={'p': s, 'c': s}, ConsistentRead=True)['Item'] == {
        'p': s, 'c': s
    }
    # Should fail (incorrect hash key types)
    with pytest.raises(ClientError, match='ValidationException'):
        test_table.update_item(Key={'p': b, 'c': s}, AttributeUpdates={})
    with pytest.raises(ClientError, match='ValidationException'):
        test_table.update_item(Key={'p': n, 'c': s}, AttributeUpdates={})
    # Should fail (incorrect sort key types)
    with pytest.raises(ClientError, match='ValidationException'):
        test_table.update_item(Key={'p': s, 'c': b}, AttributeUpdates={})
    with pytest.raises(ClientError, match='ValidationException'):
        test_table.update_item(Key={'p': s, 'c': n}, AttributeUpdates={})
    # Should fail (missing hash key)
    with pytest.raises(ClientError, match='ValidationException'):
        test_table.update_item(Key={'c': s}, AttributeUpdates={})
    # Should fail (missing sort key)
    with pytest.raises(ClientError, match='ValidationException'):
        test_table.update_item(Key={'p': s}, AttributeUpdates={})
    # Should fail (spurious key columns)
    with pytest.raises(ClientError, match='ValidationException'):
        test_table.update_item(Key={'p': s, 'c': s, 'spurious': s}, AttributeUpdates={})
    with pytest.raises(ClientError, match='ValidationException'):
        test_table_s.update_item(Key={'p': s, 'c': s}, AttributeUpdates={})
def test_put_broken():
    """
    Test: Error returns of broken CAS PUTs
    """
    fp = random_fp()
    data = random_bytes(100)
    meta = {
        "fp_algo": "test",
        "lib": "veintidos_unittests",
        "compression": "no",
    }
    flattened_meta = [{"key": k, "val": v} for k, v in meta.iteritems()]

    assert_raises(Error, ioctx.execute, fp, "cas", "put",
                  json.dumps({"meta": flattened_meta}))
    assert_raises(Error, ioctx.execute, fp, "cas", "put",
                  json.dumps({"data": base64.b64encode(data)}))
    assert_raises(Error, ioctx.execute, fp, "cas", "put",
                  json.dumps({"data": binascii.b2a_hex(data)}))
    assert_raises(Error, ioctx.execute, fp, "cas", "put", "")
    assert_raises(Error, ioctx.execute, fp, "cas", "put", "{}")
    assert_raises(Error, ioctx.execute, fp, "cas", "put", "[]")
def test_get_item_wrong_key_type(test_table, test_table_s):
    b = random_bytes()
    s = random_string()
    n = Decimal("3.14")
    # Should succeed (correct key types) but have empty result
    assert "Item" not in test_table.get_item(Key={'p': s, 'c': s}, ConsistentRead=True)
    # Should fail (incorrect hash key types)
    with pytest.raises(ClientError, match='ValidationException'):
        test_table.get_item(Key={'p': b, 'c': s})
    with pytest.raises(ClientError, match='ValidationException'):
        test_table.get_item(Key={'p': n, 'c': s})
    # Should fail (incorrect sort key types)
    with pytest.raises(ClientError, match='ValidationException'):
        test_table.get_item(Key={'p': s, 'c': b})
    with pytest.raises(ClientError, match='ValidationException'):
        test_table.get_item(Key={'p': s, 'c': n})
    # Should fail (missing hash key)
    with pytest.raises(ClientError, match='ValidationException'):
        test_table.get_item(Key={'c': s})
    # Should fail (missing sort key)
    with pytest.raises(ClientError, match='ValidationException'):
        test_table.get_item(Key={'p': s})
    # Should fail (spurious key columns)
    with pytest.raises(ClientError, match='ValidationException'):
        test_table.get_item(Key={'p': s, 'c': s, 'spurious': s})
    with pytest.raises(ClientError, match='ValidationException'):
        test_table_s.get_item(Key={'p': s, 'c': s})
def test_key_conditions_hash_only_b(test_table_b):
    p = random_bytes()
    item = {'p': p, 'val': 'hello'}
    test_table_b.put_item(Item=item)
    got_items = full_query(test_table_b, KeyConditions={
        'p': {'AttributeValueList': [p], 'ComparisonOperator': 'EQ'}})
    assert got_items == [item]
def test_delete_item_wrong_key_type(test_table, test_table_s):
    b = random_bytes()
    s = random_string()
    n = Decimal("3.14")
    # Should succeed (correct key types)
    test_table.delete_item(Key={'p': s, 'c': s})
    # Should fail (incorrect hash key types)
    with pytest.raises(ClientError, match='ValidationException'):
        test_table.delete_item(Key={'p': b, 'c': s})
    with pytest.raises(ClientError, match='ValidationException'):
        test_table.delete_item(Key={'p': n, 'c': s})
    # Should fail (incorrect sort key types)
    with pytest.raises(ClientError, match='ValidationException'):
        test_table.delete_item(Key={'p': s, 'c': b})
    with pytest.raises(ClientError, match='ValidationException'):
        test_table.delete_item(Key={'p': s, 'c': n})
    # Should fail (missing hash key)
    with pytest.raises(ClientError, match='ValidationException'):
        test_table.delete_item(Key={'c': s})
    # Should fail (missing sort key)
    with pytest.raises(ClientError, match='ValidationException'):
        test_table.delete_item(Key={'p': s})
    # Should fail (spurious key columns)
    with pytest.raises(ClientError, match='ValidationException'):
        test_table.delete_item(Key={'p': s, 'c': s, 'spurious': s})
    with pytest.raises(ClientError, match='ValidationException'):
        test_table_s.delete_item(Key={'p': s, 'c': s})
def test_bytes_hash_key(test_table_b):
    # Bytes values are passed using base64 encoding, which has weird cases
    # depending on len%3 and len%4. So let's try various lengths.
    for length in range(10, 18):
        p = random_bytes(length)
        val = random_string()
        test_table_b.put_item(Item={'p': p, 'attribute': val})
        assert test_table_b.get_item(Key={'p': p}, ConsistentRead=True)['Item'] == {
            'p': p, 'attribute': val
        }
def test_cas_put_get():
    """
    Test: `get(put(x)) == x` with random buffer content
    """
    cas = CAS(ioctx_cas)
    data_in = random_bytes(99)
    obj_name = cas.put(data_in)
    eq_buffer(data_in, cas.get(obj_name))
def test_large_blob_sort_key(test_table_sb):
    s = random_string()
    b = random_bytes(1024)
    test_table_sb.put_item(Item={'p': s, 'c': b})
    assert test_table_sb.get_item(Key={'p': s, 'c': b}, ConsistentRead=True)['Item'] == {
        'p': s, 'c': b
    }
def test_query_paging_bytes(test_table_sb):
    p = random_string()
    items = [{'p': p, 'c': random_bytes()} for i in range(10)]
    with test_table_sb.batch_writer() as batch:
        for item in items:
            batch.put_item(item)
    # Deliberately pass Limit=1 to enforce paging even though we have
    # just 10 items in the partition.
    got_items = full_query(test_table_sb, Limit=1,
                           KeyConditions={'p': {'AttributeValueList': [p], 'ComparisonOperator': 'EQ'}})
    got_sort_keys = [x['c'] for x in got_items]
    expected_sort_keys = sorted(x['c'] for x in items)
    assert got_sort_keys == expected_sort_keys
def test_large_blob_attribute(test_table):
    p = random_string()
    c = random_string()
    b = random_bytes(409500)  # a bit less than 400KB
    test_table.put_item(Item={'p': p, 'c': c, 'attribute': b})
    assert test_table.get_item(Key={'p': p, 'c': c}, ConsistentRead=True)['Item'] == {
        'p': p, 'c': c, 'attribute': b
    }
def test_bytes_sort_key(test_table_sb):
    p = random_string()
    c = random_bytes()
    val = random_string()
    test_table_sb.put_item(Item={'p': p, 'c': c, 'attribute': val})
    assert test_table_sb.get_item(Key={'p': p, 'c': c}, ConsistentRead=True)['Item'] == {
        'p': p, 'c': c, 'attribute': val
    }
def test_chunker_put_get_single():
    """
    Test: read(write(x)) = x for x filling only a single chunk
    """
    cas = CAS(ioctx_cas)
    chunker = Chunker(cas, ioctx_index)

    data_in = StringIO(random_bytes(42))
    obj_name = random_id()

    version = chunker.write_full(obj_name, data_in)

    data_out = StringIO()
    chunker.read_full(obj_name, data_out, version)

    eq_buffer(data_in.getvalue(), data_out.getvalue())
def encrypt(who: bytes, key: bytes, msg: bytes) -> Tuple[bytes, bytes]:
    iv = random_bytes(16)
    padded = pkcs7_padding(msg, 16)
    ct = cbc_encrypt(key, padded, iv)
    print(b" ".join([
        who + b":",
        b"encrypting",
        msg,
        b"using",
        binascii.hexlify(key),
        b"=>",
        binascii.hexlify(ct),
        b",",
        binascii.hexlify(iv),
    ]).decode())
    return ct, iv
def test_chunker_put_get_multiple_fraction():
    """
    Test: read(write(x)) = x for x spread over multiple chunks.
    With partially filled chunks
    """
    cas = CAS(ioctx_cas)
    chunker = Chunker(cas, ioctx_index)

    data_in = StringIO(random_bytes(int(chunker.chunk_size * 1.5)))
    obj_name = random_id()

    version = chunker.write_full(obj_name, data_in)

    data_out = StringIO()
    chunker.read_full(obj_name, data_out, version)

    eq_buffer(data_in.getvalue(), data_out.getvalue())
def test_chunker_versions():
    """
    Test: versions / head_version returns version of last write_full.
    Single write_full
    """
    cas = CAS(ioctx_cas)
    chunker = Chunker(cas, ioctx_index)

    data_in = StringIO(random_bytes(10 * 1024**1))
    obj_name = random_id()

    version = chunker.write_full(obj_name, data_in)

    eq(len(chunker.versions(obj_name)), 1)
    eq(version, chunker.head_version(obj_name))
    eq(version, chunker.versions(obj_name)[0])
def test_query_sort_order_bytes(test_table_sb):
    # Insert a lot of random items in one new partition:
    # We arbitrarily use random_bytes with a fixed length of 10.
    p = random_string()
    items = [{'p': p, 'c': random_bytes(10)} for i in range(128)]
    with test_table_sb.batch_writer() as batch:
        for item in items:
            batch.put_item(item)
    got_items = full_query(test_table_sb,
                           KeyConditions={'p': {'AttributeValueList': [p], 'ComparisonOperator': 'EQ'}})
    assert len(items) == len(got_items)
    sort_keys = [x['c'] for x in items]
    got_sort_keys = [x['c'] for x in got_items]
    # Boto3's "Binary" objects are sorted as if bytes are signed integers.
    # This isn't the order that DynamoDB itself uses (byte 0 should be first,
    # not byte -128). Sorting the byte array ".value" works.
    assert sorted(got_sort_keys, key=lambda x: x.value) == got_sort_keys
    assert sorted(sort_keys) == got_sort_keys
def test_up_down():
    """
    Test: UP/DOWN, check reference counts
    """
    fp = random_fp()
    data = random_bytes(100)
    meta = {
        "fp_algo": "test",
        "lib": "veintidos_unittests",
        "compression": "no",
    }
    args = {
        "data": base64.b64encode(data),
        "meta": [{"key": k, "val": v} for k, v in meta.iteritems()],
    }
    jargs = json.dumps(args)

    # refcount = 1
    ret, _ = ioctx.execute(fp, "cas", "put", jargs)
    eq(ret, 0)
    # refcount = 2
    ret, _ = ioctx.execute(fp, "cas", "up", "")
    eq(ret, 0)
    # refcount = 3
    ret, _ = ioctx.execute(fp, "cas", "up", "")
    eq(ret, 0)
    # refcount = 2
    ret, _ = ioctx.execute(fp, "cas", "down", "")
    eq(ret, 0)
    # refcount = 1
    ret, _ = ioctx.execute(fp, "cas", "down", "")
    eq(ret, 0)
    # refcount = 0 => obj gone
    ret, _ = ioctx.execute(fp, "cas", "down", "")
    eq(ret, 0)

    assert_raises(Error, ioctx.execute, fp, "cas", "down", "")
def compressed_cas_put_get(cas):
    """
    Test Utility: `get(put(x)) == x` for given `cas` and x in `{random, 1s, 0s}`
    """
    data_in = random_bytes(8 * 1024**2)
    obj_name = cas.put(data_in)
    eq_buffer(data_in, cas.get(obj_name, size=8 * 1024**2))
    cas.down(obj_name)

    data_in = "\xFF" * 11 * 1024**2
    obj_name = cas.put(data_in)
    eq_buffer(data_in, cas.get(obj_name, size=11 * 1024**2))
    cas.down(obj_name)

    data_in = "\x00" * 42
    obj_name = cas.put(data_in)
    eq_buffer(data_in, cas.get(obj_name))
    cas.down(obj_name)
def test_chunker_multiple_versions():
    """
    Test: versions / head_version return version of last write_full.
    Multiple write_full
    """
    cas = CAS(ioctx_cas)
    chunker = Chunker(cas, ioctx_index)

    data_in = random_bytes(42)
    obj_name = random_id()

    versions = (
        chunker.write_full(obj_name, StringIO(data_in)),
        chunker.write_full(obj_name, StringIO(data_in)),
        chunker.write_full(obj_name, StringIO(data_in)),
        chunker.write_full(obj_name, StringIO(data_in)),
        chunker.write_full(obj_name, StringIO(data_in)),
    )

    eq(len(versions), len(chunker.versions(obj_name)))
    eq(versions[-1], chunker.head_version(obj_name))
    eq(versions[0], chunker.versions(obj_name)[0])
def test_chunker_no_litter():
    """
    Test: Write and immediate remove should not leave any object behind
    """
    cas = CAS(ioctx_cas)
    chunker = Chunker(cas, ioctx_index)

    data_in = StringIO(random_bytes(chunker.chunk_size * 4))
    obj_name = random_id()

    chunker.write_full(obj_name, data_in)
    chunker.remove_all_versions(obj_name)

    cas_objs = [x.key for x in ioctx_cas.list_objects()]
    index_objs = [x.key for x in ioctx_index.list_objects()]

    print "CAS objects left:", cas_objs
    print "Index objects left:", index_objs

    eq(len(cas_objs), 0)
    eq(len(index_objs), 0)
def test_put_correct():
    """
    Test: Regular CAS PUT
    """
    fp = random_fp()
    data = random_bytes(100)
    meta = {
        "fp_algo": "test",
        "lib": "veintidos_unittests",
        "compression": "no",
    }
    args = {
        "data": base64.b64encode(data),
        "meta": [{"key": k, "val": v} for k, v in meta.iteritems()],
    }
    jargs = json.dumps(args)

    ret, _ = ioctx.execute(fp, "cas", "put", jargs)
    eq(ret, 0)
def test_chunker_remove():
    """
    Test: remove actually removes

    - `remove_version(write_full)`: No versions, but index object
    - `write_full, write_full, remove_all_versions`: Index object gone
    """
    cas = CAS(ioctx_cas)
    chunker = Chunker(cas, ioctx_index)

    data_in = random_bytes(42)
    obj_name = random_id()

    version = chunker.write_full(obj_name, StringIO(data_in))
    chunker.remove_version(obj_name, version)
    eq(chunker.head_version(obj_name), None)

    chunker.write_full(obj_name, StringIO(data_in))
    chunker.write_full(obj_name, StringIO(data_in))
    chunker.remove_all_versions(obj_name)
    assert_raises(ObjectNotFound, chunker.head_version, obj_name)
import base64

import util

mystery_b = base64.b64decode("""
Um9sbGluJyBpbiBteSA1LjAKV2l0aCBteSByYWctdG9wIGRvd24gc28gbXkg
aGFpciBjYW4gYmxvdwpUaGUgZ2lybGllcyBvbiBzdGFuZGJ5IHdhdmluZyBq
dXN0IHRvIHNheSBoaQpEaWQgeW91IHN0b3A/IE5vLCBJIGp1c3QgZHJvdmUg
YnkK
""")

key_b = util.random_bytes(16)


def oracle(msg_b):
    return util.aes_ecb_enc(util.pkcs7pad(msg_b + mystery_b, 16), key_b)


# step 1: discover the block size
start_len = len(oracle(bytes()))
print(start_len)
acc_bytes = b'a'
while True:
    new_len = len(oracle(acc_bytes))
    if new_len != start_len:
        blocksize = new_len - start_len
        break
    else:
        acc_bytes += b'a'
print('Detected block size {}'.format(blocksize))

# step 2: detect that the function is using ECB
test_str = util.random_bytes(blocksize)
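# Hedged continuation of step 2 (not in the original snippet): ECB encrypts
# equal plaintext blocks to equal ciphertext blocks, so feeding test_str twice
# and checking for a repeated ciphertext block reveals the mode.
enc = oracle(test_str * 2)
blocks = [enc[i:i + blocksize] for i in range(0, len(enc), blocksize)]
print('Oracle appears to use ECB:', len(blocks) != len(set(blocks)))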
def oracle(msg_b):
    random_prefix = util.random_bytes(random.randrange(16))
    return util.aes_ecb_enc(util.pkcs7pad(
        random_prefix + msg_b + mystery_b, 16), key_b)
import util

key_b = util.random_bytes(16)

PREFIX = b'comment1=cooking%20MCs;userdata='
SUFFIX = b';comment2=%20like%20a%20pound%20of%20bacon'
GOAL = b';admin=true;'


def f1(msg_b):
    b = PREFIX + msg_b + SUFFIX
    return util.cbc_enc(util.pkcs7pad(b, 16), key_b)


def has_admin(enc_b):
    msg_b = util.cbc_dec(enc_b, key_b)
    return GOAL in msg_b


normal = f1(b'')
switch = util.xor_bytestring(GOAL, (PREFIX + SUFFIX)[16:])
switch = util.xor_bytestring(normal[:16], switch)
print(has_admin(switch + normal[len(switch):]))