Exemplo n.º 1
0
def main():
    """Create Path ORAM storage in a local mmap file, save the client
    secrets (key, stash, position map) to disk, then upload the
    encrypted storage file to a remote host over SFTP.

    NOTE(review): relies on module-level globals defined elsewhere in the
    file (storage_name, block_size, block_count, ssh_host, ssh_username,
    ssh_password) -- confirm they are set before calling.
    """

    print("Storage Name: %s" % (storage_name))
    print("Block Count: %s" % (block_count))
    print("Block Size: %s" % (MemorySize(block_size)))
    print("Total Memory: %s" % (MemorySize(block_size * block_count)))
    print("Actual Storage Required: %s" % (MemorySize(
        PathORAM.compute_storage_size(
            block_size, block_count, storage_type='mmap'))))
    print("")

    print("Setting Up Path ORAM Storage Locally")
    setup_start = time.time()
    with PathORAM.setup(storage_name,
                        block_size,
                        block_count,
                        storage_type='mmap',
                        ignore_existing=True) as f:
        print("Total Setup Time: %.2f s" % (time.time() - setup_start))
        print("Current Stash Size: %s" % len(f.stash))
        print("Total Data Transmission: %s" %
              (MemorySize(f.bytes_sent + f.bytes_received)))
        print("")

    # The storage is closed at this point, but the client-side state
    # (key, stash, position map) is still readable on f and is persisted
    # below so the store can be reopened later.
    print("Saving key to file: %s.key" % (storage_name))
    save_private_key(storage_name + ".key", f.key)
    print("Saving stash to file: %s.stash" % (storage_name))
    with open(storage_name + ".stash", 'wb') as fstash:
        pickle.dump(f.stash, fstash)
    print("Saving position map to file: %s.position" % (storage_name))
    with open(storage_name + ".position", 'wb') as fpos:
        pickle.dump(f.position_map, fpos)

    # Start an SSH client using paramiko
    print("Starting SSH Client")
    with paramiko.SSHClient() as ssh:
        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        ssh.load_system_host_keys()
        ssh.connect(ssh_host, username=ssh_username, password=ssh_password)

        sftp = ssh.open_sftp()

        def my_hook(t):
            # Adapt sftp.put's (bytes-so-far, total) progress callback to
            # tqdm's incremental update API.
            def inner(b, total):
                t.total = total
                t.update(b - inner.last_b)
                inner.last_b = b

            inner.last_b = 0
            return inner

        with tqdm.tqdm(desc="Transferring Storage",
                       unit='B',
                       unit_scale=True,
                       miniters=1) as t:
            sftp.put(storage_name, storage_name, callback=my_hook(t))
        sftp.close()

    print("Deleting Local Copy of Storage")
    os.remove(storage_name)
Exemplo n.º 2
0
def main():
    """Benchmark Path ORAM backed by RAM block storage.

    Sets up the storage, dumps it to a local file, reopens it (which
    resets the transmission counters), then measures average data
    transmitted and latency per block access over 100 random reads.

    NOTE(review): relies on module-level globals (storage_name,
    block_size, block_count) defined elsewhere in the file.
    """

    print("Storage Name: %s" % (storage_name))
    print("Block Count: %s" % (block_count))
    print("Block Size: %s" % (MemorySize(block_size)))
    print("Total Memory: %s"
          % (MemorySize(block_size*block_count)))
    print("Actual Storage Required: %s"
          % (MemorySize(
              PathORAM.compute_storage_size(
                  block_size,
                  block_count,
                  storage_type='ram'))))
    print("")

    print("Setting Up Path ORAM Storage")
    setup_start = time.time()
    with PathORAM.setup(storage_name, # RAM storage ignores this argument
                        block_size,
                        block_count,
                        storage_type='ram',
                        ignore_existing=True) as f:
        # BUG FIX: was "%2.f" (field width 2, zero precision, e.g. "3 s");
        # "%.2f" prints the two-decimal seconds used by the other examples.
        print("Total Setup Time: %.2f s"
              % (time.time()-setup_start))
        print("Current Stash Size: %s"
              % len(f.stash))
        print("Total Data Transmission: %s"
              % (MemorySize(f.bytes_sent + f.bytes_received)))
        print("")

    # This must be done after closing the file to ensure the lock flag
    # is set to False in the saved data. The tofile method only exists
    # on BlockStorageRAM
    f.raw_storage.tofile(storage_name)

    # We close the device and reopen it after
    # setup to reset the bytes sent and bytes
    # received stats.
    with PathORAM(BlockStorageRAM.fromfile(storage_name),
                  f.stash,
                  f.position_map,
                  key=f.key) as f:

        test_count = 100
        start_time = time.time()
        for t in tqdm.tqdm(list(range(test_count)),
                           desc="Running I/O Performance Test"):
            f.read_block(random.randint(0,f.block_count-1))
        stop_time = time.time()
        print("Current Stash Size: %s"
              % len(f.stash))
        # Average bytes moved per access, also as a multiple of block_size.
        print("Access Block Avg. Data Transmitted: %s (%.3fx)"
              % (MemorySize((f.bytes_sent + f.bytes_received)/float(test_count)),
                 (f.bytes_sent + f.bytes_received)/float(test_count)/float(block_size)))
        print("Access Block Avg. Latency: %.2f ms"
              % ((stop_time-start_time)/float(test_count)*1000))
        print("")

    # cleanup because this is a test example
    os.remove(storage_name)
Exemplo n.º 3
0
def main():
    """Benchmark Path ORAM over SFTP-backed block storage: set up the
    store on the remote host, reopen it to reset the transmission
    counters, then measure average data transmitted and latency over
    100 random block reads.

    NOTE(review): relies on module-level globals defined elsewhere
    (storage_name, block_size, block_count, ssh_host, ssh_username,
    ssh_password).
    """

    print("Storage Name: %s" % (storage_name))
    print("Block Count: %s" % (block_count))
    print("Block Size: %s" % (MemorySize(block_size)))
    print("Total Memory: %s" % (MemorySize(block_size * block_count)))
    print("Actual Storage Required: %s" % (MemorySize(
        PathORAM.compute_storage_size(
            block_size, block_count, storage_type='sftp'))))
    print("")

    # Start an SSH client using paramiko
    print("Starting SSH Client")
    with paramiko.SSHClient() as ssh:
        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        ssh.load_system_host_keys()
        ssh.connect(ssh_host, username=ssh_username, password=ssh_password)

        print("Setting Up Path ORAM Storage")
        setup_start = time.time()
        with PathORAM.setup(storage_name,
                            block_size,
                            block_count,
                            storage_type='sftp',
                            sshclient=ssh,
                            ignore_existing=True) as f:
            print("Total Setup Time: %.2f s" % (time.time() - setup_start))
            print("Current Stash Size: %s" % len(f.stash))
            print("Total Data Transmission: %s" %
                  (MemorySize(f.bytes_sent + f.bytes_received)))
            print("")

        # We close the device and reopen it after
        # setup to reset the bytes sent and bytes
        # received stats.
        with PathORAM(storage_name,
                      f.stash,
                      f.position_map,
                      key=f.key,
                      storage_type='sftp',
                      sshclient=ssh) as f:

            test_count = 100
            start_time = time.time()
            for t in tqdm.tqdm(list(range(test_count)),
                               desc="Running I/O Performance Test"):
                f.read_block(random.randint(0, f.block_count - 1))
            stop_time = time.time()
            print("Current Stash Size: %s" % len(f.stash))
            # Average bytes moved per access, also expressed as a multiple
            # of the logical block size.
            print("Access Block Avg. Data Transmitted: %s (%.3fx)" %
                  (MemorySize(
                      (f.bytes_sent + f.bytes_received) / float(test_count)),
                   (f.bytes_sent + f.bytes_received) / float(test_count) /
                   float(block_size)))
            print("Fetch Block Avg. Latency: %.2f ms" %
                  ((stop_time - start_time) / float(test_count) * 1000))
            print("")
Exemplo n.º 4
0
def main():
    """Benchmark Path ORAM backed by RAM block storage.

    Sets up the storage, dumps it to a local file, reopens it (which
    resets the transmission counters), then measures average data
    transmitted and latency per block access over 100 random reads.

    NOTE(review): relies on module-level globals (storage_name,
    block_size, block_count) defined elsewhere in the file.
    """

    print("Storage Name: %s" % (storage_name))
    print("Block Count: %s" % (block_count))
    print("Block Size: %s" % (MemorySize(block_size)))
    print("Total Memory: %s" % (MemorySize(block_size * block_count)))
    print("Actual Storage Required: %s" % (MemorySize(
        PathORAM.compute_storage_size(
            block_size, block_count, storage_type='ram'))))
    print("")

    print("Setting Up Path ORAM Storage")
    setup_start = time.time()
    with PathORAM.setup(
            storage_name,  # RAM storage ignores this argument
            block_size,
            block_count,
            storage_type='ram',
            ignore_existing=True) as f:
        # BUG FIX: was "%2.f" (field width 2, zero precision, e.g. "3 s");
        # "%.2f" prints the two-decimal seconds used by the other examples.
        print("Total Setup Time: %.2f s" % (time.time() - setup_start))
        print("Current Stash Size: %s" % len(f.stash))
        print("Total Data Transmission: %s" %
              (MemorySize(f.bytes_sent + f.bytes_received)))
        print("")

    # This must be done after closing the file to ensure the lock flag
    # is set to False in the saved data. The tofile method only exists
    # on BlockStorageRAM
    f.raw_storage.tofile(storage_name)

    # We close the device and reopen it after
    # setup to reset the bytes sent and bytes
    # received stats.
    with PathORAM(BlockStorageRAM.fromfile(storage_name),
                  f.stash,
                  f.position_map,
                  key=f.key) as f:

        test_count = 100
        start_time = time.time()
        for t in tqdm.tqdm(list(range(test_count)),
                           desc="Running I/O Performance Test"):
            f.read_block(random.randint(0, f.block_count - 1))
        stop_time = time.time()
        print("Current Stash Size: %s" % len(f.stash))
        # Average bytes moved per access, also as a multiple of block_size.
        print("Access Block Avg. Data Transmitted: %s (%.3fx)" %
              (MemorySize(
                  (f.bytes_sent + f.bytes_received) / float(test_count)),
               (f.bytes_sent + f.bytes_received) / float(test_count) /
               float(block_size)))
        print("Access Block Avg. Latency: %.2f ms" %
              ((stop_time - start_time) / float(test_count) * 1000))
        print("")

    # cleanup because this is a test example
    os.remove(storage_name)
Exemplo n.º 5
0
def main():
    """Benchmark Path ORAM over S3-backed block storage: set up the
    store, reopen it to reset the transmission counters, then measure
    average data transmitted and latency over 100 random block reads.

    NOTE(review): relies on module-level globals defined elsewhere
    (storage_name, block_size, block_count, bucket_name).
    """

    print("Storage Name: %s" % (storage_name))
    print("Block Count: %s" % (block_count))
    print("Block Size: %s" % (MemorySize(block_size)))
    print("Total Memory: %s"
          % (MemorySize(block_size*block_count)))
    print("Actual Storage Required: %s"
          % (MemorySize(
              PathORAM.compute_storage_size(
                  block_size,
                  block_count,
                  storage_type='s3'))))
    print("")

    print("Setting Up Path ORAM Storage")
    setup_start = time.time()
    with PathORAM.setup(storage_name,
                        block_size,
                        block_count,
                        storage_type='s3',
                        bucket_name=bucket_name,
                        ignore_existing=True) as f:
        print("Total Setup Time: %.2f s"
              % (time.time()-setup_start))
        print("Current Stash Size: %s"
              % len(f.stash))
        print("Total Data Transmission: %s"
              % (MemorySize(f.bytes_sent + f.bytes_received)))
        print("")

    # We close the device and reopen it after
    # setup to reset the bytes sent and bytes
    # received stats.
    with PathORAM(storage_name,
                  f.stash,
                  f.position_map,
                  key=f.key,
                  storage_type='s3',
                  bucket_name=bucket_name) as f:

        test_count = 100
        start_time = time.time()
        for t in tqdm.tqdm(list(range(test_count)),
                           desc="Running I/O Performance Test"):
            f.read_block(random.randint(0,f.block_count-1))
        stop_time = time.time()
        print("Current Stash Size: %s"
              % len(f.stash))
        # Average bytes moved per access, also as a multiple of block_size.
        print("Access Block Avg. Data Transmitted: %s (%.3fx)"
              % (MemorySize((f.bytes_sent + f.bytes_received)/float(test_count)),
                 (f.bytes_sent + f.bytes_received)/float(test_count)/float(block_size)))
        print("Fetch Block Avg. Latency: %.2f ms"
              % ((stop_time-start_time)/float(test_count)*1000))
        print("")
Exemplo n.º 6
0
 def test_write_blocks(self):
     """Batch write/read round-trip: verify the original contents,
     overwrite every block with a sentinel pattern, verify the new
     contents, then restore the originals."""
     # Sentinel block: every byte set to the block count, which differs
     # from the fixture blocks (block i is filled with byte i < count).
     data = [bytearray([self._block_count])*self._block_size
             for i in xrange(self._block_count)]
     with PathORAM(self._testfname,
                   self._stash,
                   self._position_map,
                   key=self._key,
                   storage_type=self._type_name,
                   **self._kwds) as f:
         orig = f.read_blocks(list(xrange(self._block_count)))
         self.assertEqual(len(orig), self._block_count)
         for i, block in enumerate(orig):
             self.assertEqual(list(bytearray(block)),
                              list(self._blocks[i]))
         f.write_blocks(list(xrange(self._block_count)),
                        [bytes(b) for b in data])
         new = f.read_blocks(list(xrange(self._block_count)))
         self.assertEqual(len(new), self._block_count)
         for i, block in enumerate(new):
             self.assertEqual(list(bytearray(block)),
                              list(data[i]))
         # Restore the original contents for subsequent tests.
         f.write_blocks(list(xrange(self._block_count)),
                        [bytes(b) for b in self._blocks])
         orig = f.read_blocks(list(xrange(self._block_count)))
         self.assertEqual(len(orig), self._block_count)
         for i, block in enumerate(orig):
             self.assertEqual(list(bytearray(block)),
                              list(self._blocks[i]))
Exemplo n.º 7
0
 def setUp(self):
     """Create a fresh RAM-backed ORAM and wrap it in a KVORAM store."""
     oram = PathORAM.setup(
         "test", BLOCK_SIZE, BLOCK_COUNT,
         storage_type="ram", ignore_existing=True)
     self.oram = oram
     self.store = privatekv.store.KVORAM(oram)
Exemplo n.º 8
0
def main():
    """Reopen an SFTP-hosted Path ORAM using client secrets loaded from
    disk, benchmark read latency, and write the (possibly modified)
    stash and position map back even if the benchmark fails.

    NOTE(review): relies on module-level globals defined elsewhere
    (storage_name, ssh_host, ssh_username, ssh_password).
    """

    print("Loading key from file: %s.key"
          % (storage_name))
    key = load_private_key(storage_name+".key")
    print("Loading stash from file: %s.stash"
          % (storage_name))
    # NOTE(review): pickle files are assumed to be trusted local state
    # written by the companion setup script -- never unpickle untrusted data.
    with open(storage_name+".stash", 'rb') as fstash:
        stash = pickle.load(fstash)
    print("Loading position map from file: %s.position"
          % (storage_name))
    with open(storage_name+".position", 'rb') as fpos:
        position_map = pickle.load(fpos)

    # Start an SSH client using paramiko
    print("Starting SSH Client")
    with paramiko.SSHClient() as ssh:
        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        ssh.load_system_host_keys()
        ssh.connect(ssh_host,
                    username=ssh_username,
                    password=ssh_password)

        # cached_levels / concurrency_level / threadpool_size tune the
        # client-side cache and parallel SFTP transfers for this backend.
        with PathORAM(storage_name,
                      stash,
                      position_map,
                      key=key,
                      storage_type='sftp',
                      cached_levels=6,
                      concurrency_level=3,
                      threadpool_size=multiprocessing.cpu_count()*2,
                      sshclient=ssh) as f:

            try:

                test_count = 100
                start_time = time.time()
                for t in tqdm.tqdm(list(range(test_count)),
                                   desc="Running I/O Performance Test"):
                    f.read_block(random.randint(0,f.block_count-1))
                stop_time = time.time()
                print("Current Stash Size: %s"
                      % len(f.stash))
                print("Fetch Block Avg. Latency: %.2f ms"
                      % ((stop_time-start_time)/float(test_count)*1000))
                print("")

            finally:

                # Always persist the updated client state so the store can
                # be reopened, even when the benchmark above raises.
                print("Saving stash to file: %s.stash"
                      % (storage_name))
                with open(storage_name+".stash", 'wb') as fstash:
                    pickle.dump(f.stash, fstash)
                print("Saving position map to file: %s.position"
                      % (storage_name))
                with open(storage_name+".position", 'wb') as fpos:
                    pickle.dump(f.position_map, fpos)
Exemplo n.º 9
0
 def test_compute_level_load(self):
     """A one-block, capacity-1, uncached ORAM reports a fully loaded
     root level (average load 1.0 at level 0)."""
     storage = PathORAM.setup(
         "test", 32, 1,
         bucket_capacity=1,
         storage_type="ram",
         ignore_existing=True,
         cached_levels=0)
     loads = privatekv.utils.compute_avg_level_load(storage)
     self.assertEqual(1.0, loads[0])
     storage.close()
Exemplo n.º 10
0
 def test_init_noexists(self):
     """Opening a storage name that does not exist must raise IOError."""
     self.assertFalse(os.path.exists(self._dummy_name))
     with self.assertRaises(IOError):
         with PathORAM(self._dummy_name,
                       self._stash,
                       self._position_map,
                       key=self._key,
                       storage_type=self._type_name,
                       **self._kwds):
             pass                                   # pragma: no cover
Exemplo n.º 11
0
 def test_write_block(self):
     """Single-block write round-trip: overwrite every block with a
     sentinel pattern, read each back, then restore the originals."""
     # Sentinel block: every byte set to the block count, which differs
     # from the fixture blocks (block i is filled with byte i < count).
     data = bytearray([self._block_count])*self._block_size
     self.assertEqual(len(data) > 0, True)
     with PathORAM(self._testfname,
                   self._stash,
                   self._position_map,
                   key=self._key,
                   storage_type=self._type_name,
                   **self._kwds) as f:
         # The sentinel must differ from every stored block pre-write.
         for i in xrange(self._block_count):
             self.assertNotEqual(list(bytearray(f.read_block(i))),
                                 list(data))
         for i in xrange(self._block_count):
             f.write_block(i, bytes(data))
         for i in xrange(self._block_count):
             self.assertEqual(list(bytearray(f.read_block(i))),
                              list(data))
         # Restore the original contents for subsequent tests.
         for i, block in enumerate(self._blocks):
             f.write_block(i, bytes(block))
Exemplo n.º 12
0
 def setUpClass(cls):
     """Build the shared ORAM test fixture once per test class: a
     storage file of _block_count blocks where block i is filled with
     byte value i, plus the client key/stash/position map and a
     guaranteed-nonexistent dummy filename."""
     # Subclasses must configure these class attributes before running.
     assert cls._type_name is not None
     assert cls._aes_mode is not None
     # An explicit key and a requested key size are mutually exclusive.
     assert not ((cls._test_key is not None) and \
                 (cls._test_key_size is not None))
     assert cls._bucket_capacity is not None
     assert cls._heap_base is not None
     assert cls._kwds is not None
     # Reserve a unique temp filename, then delete it so the name is
     # known not to exist (used by the "no such storage" tests).
     fd, cls._dummy_name = tempfile.mkstemp()
     os.close(fd)
     try:
         os.remove(cls._dummy_name)
     except OSError:                                # pragma: no cover
         pass                                       # pragma: no cover
     cls._block_size = 25
     cls._block_count = 47
     cls._testfname = cls.__name__ + "_testfile.bin"
     cls._blocks = []
     # initialize fills block i with byte value i at setup time.
     f = PathORAM.setup(
         cls._testfname,
         cls._block_size,
         cls._block_count,
         bucket_capacity=cls._bucket_capacity,
         heap_base=cls._heap_base,
         key_size=cls._test_key_size,
         key=cls._test_key,
         storage_type=cls._type_name,
         aes_mode=cls._aes_mode,
         initialize=lambda i: bytes(bytearray([i])*cls._block_size),
         ignore_existing=True,
         **cls._kwds)
     f.close()
     # Client-side state the tests need to reopen the storage.
     cls._key = f.key
     cls._stash = f.stash
     cls._position_map = f.position_map
     # Expected plaintext contents, mirroring the initialize lambda.
     for i in range(cls._block_count):
         data = bytearray([i])*cls._block_size
         cls._blocks.append(data)
Exemplo n.º 13
0
 def test_read_blocks(self):
     """read_blocks must honor arbitrary index lists: the full range,
     a single index, and a rotated ordering."""
     with PathORAM(self._testfname,
                   self._stash,
                   self._position_map,
                   key=self._key,
                   storage_type=self._type_name,
                   **self._kwds) as f:
         data = f.read_blocks(list(xrange(self._block_count)))
         self.assertEqual(len(data), self._block_count)
         for i, block in enumerate(data):
             self.assertEqual(list(bytearray(block)),
                              list(self._blocks[i]))
         data = f.read_blocks([0])
         self.assertEqual(len(data), 1)
         self.assertEqual(list(bytearray(data[0])),
                          list(self._blocks[0]))
         self.assertEqual(len(self._blocks) > 1, True)
         # Indices 1..N-1 followed by 0: results must follow request order.
         data = f.read_blocks(list(xrange(1, self._block_count)) + [0])
         self.assertEqual(len(data), self._block_count)
         for i, block in enumerate(data[:-1], 1):
             self.assertEqual(list(bytearray(block)),
                              list(self._blocks[i]))
         self.assertEqual(list(bytearray(data[-1])),
                          list(self._blocks[0]))
Exemplo n.º 14
0
    def test_load_save_client_data(self):
        """Client data (key, stash, position map) saved to disk can be
        reloaded and used to reopen the same file-backed ORAM."""
        oram = PathORAM.setup(os.path.join(self.tmpdir.name, "test"),
                              BLOCK_SIZE,
                              BLOCK_COUNT,
                              storage_type="file",
                              ignore_existing=True)
        data = b"hello world"
        # Zero-pad the payload to exactly one block.
        data += b"\x00" * (oram.block_size - len(data))
        oram.write_block(0, bytes(data))
        oram.close()
        # Client data is saved after the ORAM is closed; the closed handle
        # still exposes key/stash/position_map for persistence.
        privatekv.utils.save_oram_client_data(oram, path=self.tmpdir.name)

        key, stash, pm = privatekv.utils.read_oram_client_data(
            "test", path=self.tmpdir.name)
        oram = PathORAM(os.path.join(self.tmpdir.name, "test"),
                        stash,
                        pm,
                        key=key,
                        storage_type="file")
        self.assertEqual(data, oram.read_block(0))
        oram.close()
def randomString(stringLength=10):
    """Return a random string of lowercase ASCII letters.

    stringLength: number of characters to generate (default 10).
    """
    alphabet = string.ascii_lowercase
    return "".join(random.choice(alphabet) for _ in range(stringLength))

if __name__ == "__main__":
    # Command-line driver for the PrivateKV S3 access-logging test.
    ap = ArgumentParser(description="runs the PrivateKV S3 access logging test")
    ap.add_argument("-b", default="test", help="the S3 bucket to use")
    ap.add_argument("-i", default=192, type=int, help="number of inserts")
    ap.add_argument("--create", default=False, action="store_true", help="only create the oram")
    args = ap.parse_args()

    if args.create:
        print("Setting up ORAM")
        pyoram.config.SHOW_PROGRESS_BAR = True
        oram = PathORAM.setup("test", BLOCK_SIZE, BLOCK_COUNT, cached_levels=0, storage_type="s3",
                              bucket_name=args.b, ignore_existing=True)
        kv = privatekv.store.KVORAM(oram)

        keys = set()
        inserts = args.i
        for i in range(inserts):
            key = str(uuid.uuid4())
            kv.put(key, randomString())
            keys.add(key)
            print("inserts %d / %d" % (i, inserts))

        # FIX: use a context manager so the keys file is closed (and
        # flushed) even if a write fails; the original leaked the handle
        # on error.
        with open("keys", "w") as keys_file:
            for key in list(keys):
                keys_file.write(key + "\n")
Exemplo n.º 16
0
def main():
    """Create Path ORAM storage in a local mmap file, save the client
    secrets (key, stash, position map) to disk, then upload the
    encrypted storage file to a remote host over SFTP.

    NOTE(review): relies on module-level globals defined elsewhere in the
    file (storage_name, block_size, block_count, ssh_host, ssh_username,
    ssh_password) -- confirm they are set before calling.
    """

    print("Storage Name: %s" % (storage_name))
    print("Block Count: %s" % (block_count))
    print("Block Size: %s" % (MemorySize(block_size)))
    print("Total Memory: %s"
          % (MemorySize(block_size*block_count)))
    print("Actual Storage Required: %s"
          % (MemorySize(
              PathORAM.compute_storage_size(
                  block_size,
                  block_count,
                  storage_type='mmap'))))
    print("")

    print("Setting Up Path ORAM Storage Locally")
    setup_start = time.time()
    with PathORAM.setup(storage_name,
                        block_size,
                        block_count,
                        storage_type='mmap',
                        ignore_existing=True) as f:
        print("Total Setup Time: %.2f s"
              % (time.time()-setup_start))
        print("Current Stash Size: %s"
              % len(f.stash))
        print("Total Data Transmission: %s"
              % (MemorySize(f.bytes_sent + f.bytes_received)))
        print("")

    # The storage is closed at this point, but the client-side state
    # (key, stash, position map) is still readable on f and is persisted
    # below so the store can be reopened later.
    print("Saving key to file: %s.key"
          % (storage_name))
    save_private_key(storage_name+".key", f.key)
    print("Saving stash to file: %s.stash"
          % (storage_name))
    with open(storage_name+".stash", 'wb') as fstash:
        pickle.dump(f.stash, fstash)
    print("Saving position map to file: %s.position"
          % (storage_name))
    with open(storage_name+".position", 'wb') as fpos:
        pickle.dump(f.position_map, fpos)

    # Start an SSH client using paramiko
    print("Starting SSH Client")
    with paramiko.SSHClient() as ssh:
        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        ssh.load_system_host_keys()
        ssh.connect(ssh_host,
                    username=ssh_username,
                    password=ssh_password)

        sftp = ssh.open_sftp()

        def my_hook(t):
            # Adapt sftp.put's (bytes-so-far, total) progress callback to
            # tqdm's incremental update API.
            def inner(b, total):
                t.total = total
                t.update(b - inner.last_b)
                inner.last_b = b
            inner.last_b = 0
            return inner
        with tqdm.tqdm(desc="Transferring Storage",
                       unit='B',
                       unit_scale=True,
                       miniters=1) as t:
            sftp.put(storage_name,
                     storage_name,
                     callback=my_hook(t))
        sftp.close()

    print("Deleting Local Copy of Storage")
    os.remove(storage_name)
Exemplo n.º 17
0
 def test_init_exists(self):
     """Opening existing storage must validate key, stash, and position
     map (ValueError on mismatches), must refuse to wrap an already
     open heap storage, and a clean open/close cycle must leave the
     encrypted block payload on disk unchanged."""
     self.assertEqual(os.path.exists(self._testfname), True)
     with open(self._testfname, 'rb') as f:
         databefore = f.read()
     # no key
     with self.assertRaises(ValueError):
         with PathORAM(self._testfname,
                       self._stash,
                       self._position_map,
                       storage_type=self._type_name,
                       **self._kwds) as f:
             pass                                   # pragma: no cover
     # stash does not match digest
     with self.assertRaises(ValueError):
         with PathORAM(self._testfname,
                       {1: bytes()},
                       self._position_map,
                       key=self._key,
                       storage_type=self._type_name,
                       **self._kwds) as f:
             pass                                   # pragma: no cover
     # stash hash invalid key (negative)
     with self.assertRaises(ValueError):
         with PathORAM(self._testfname,
                       {-1: bytes()},
                       self._position_map,
                       key=self._key,
                       storage_type=self._type_name,
                       **self._kwds) as f:
             pass                                   # pragma: no cover
     # position map has invalid item (negative)
     with self.assertRaises(ValueError):
         with PathORAM(self._testfname,
                       self._stash,
                       [-1],
                       key=self._key,
                       storage_type=self._type_name,
                       **self._kwds) as f:
             pass                                   # pragma: no cover
     # position map does not match digest
     with self.assertRaises(ValueError):
         with PathORAM(self._testfname,
                       self._stash,
                       [1],
                       key=self._key,
                       storage_type=self._type_name,
                       **self._kwds) as f:
             pass                                   # pragma: no cover
     # wrapping a heap storage object that is already open must fail
     with self.assertRaises(ValueError):
         with EncryptedHeapStorage(self._testfname,
                                   key=self._key,
                                   storage_type=self._type_name) as fb:
             with PathORAM(fb,
                           self._stash,
                           self._position_map,
                           key=self._key,
                           storage_type=self._type_name,
                           **self._kwds) as f:
                 self.assertIs(f.heap_storage, fb)
                 pass                               # pragma: no cover
     # valid open: properties must reflect the setup-time parameters
     with PathORAM(self._testfname,
                   self._stash,
                   self._position_map,
                   key=self._key,
                   storage_type=self._type_name,
                   **self._kwds) as f:
         self.assertEqual(f.key, self._key)
         self.assertEqual(f.block_size, self._block_size)
         self.assertEqual(f.block_count, self._block_count)
         self.assertEqual(f.storage_name, self._testfname)
         self.assertEqual(f.header_data, bytes())
     self.assertEqual(os.path.exists(self._testfname), True)
     with open(self._testfname, 'rb') as f:
         dataafter = f.read()
     # The trailing block payload must be byte-identical after the
     # open/close cycle (only header/metadata may differ).
     self.assertEqual(databefore[-(self._block_count*self._block_size):],
                      dataafter[-(self._block_count*self._block_size):])
Exemplo n.º 18
0
 def test_setup(self):
     """PathORAM.setup must create a file whose size matches
     compute_storage_size (and exceeds the headerless size), and the
     resulting storage must reopen with the same key and geometry."""
     # Derive a unique per-test filename from the test id.
     fname = ".".join(self.id().split(".")[1:])
     fname += ".bin"
     fname = os.path.join(thisdir, fname)
     if os.path.exists(fname):
         os.remove(fname)                           # pragma: no cover
     bsize = 10
     bcount = 11
     fsetup = PathORAM.setup(
         fname,
         bsize,
         bcount,
         bucket_capacity=self._bucket_capacity,
         heap_base=self._heap_base,
         key=self._test_key,
         key_size=self._test_key_size,
         aes_mode=self._aes_mode,
         storage_type=self._type_name,
         **self._kwds)
     fsetup.close()
     self.assertEqual(type(fsetup.raw_storage),
                      BlockStorageTypeFactory(self._type_name))
     # test that these can be called with default keyword values
     fsetup.stash_digest(fsetup.stash)
     fsetup.position_map_digest(fsetup.position_map)
     with open(fname, 'rb') as f:
         flen = len(f.read())
         self.assertEqual(
             flen,
             PathORAM.compute_storage_size(
                 bsize,
                 bcount,
                 bucket_capacity=self._bucket_capacity,
                 heap_base=self._heap_base,
                 aes_mode=self._aes_mode,
                 storage_type=self._type_name))
         # The on-disk size must exceed the headerless estimate, i.e.
         # the header contributes a nonzero amount.
         self.assertEqual(
             flen >
             PathORAM.compute_storage_size(
                 bsize,
                 bcount,
                 bucket_capacity=self._bucket_capacity,
                 heap_base=self._heap_base,
                 aes_mode=self._aes_mode,
                 storage_type=self._type_name,
                 ignore_header=True),
             True)
     # Reopen with the setup-time client state; all properties must match.
     with PathORAM(fname,
                   fsetup.stash,
                   fsetup.position_map,
                   key=fsetup.key,
                   storage_type=self._type_name,
                   **self._kwds) as f:
         self.assertEqual(f.header_data, bytes())
         self.assertEqual(fsetup.header_data, bytes())
         self.assertEqual(f.key, fsetup.key)
         self.assertEqual(f.block_size, bsize)
         self.assertEqual(fsetup.block_size, bsize)
         self.assertEqual(f.block_count, bcount)
         self.assertEqual(fsetup.block_count, bcount)
         self.assertEqual(f.storage_name, fname)
         self.assertEqual(fsetup.storage_name, fname)
     os.remove(fname)
def randomString(stringLength=10):
    """Return a random string of lowercase ASCII letters.

    stringLength -- number of characters to generate (default 10).
    """
    return ''.join(random.choice(string.ascii_lowercase)
                   for _ in range(stringLength))

if __name__ == "__main__":
    # Measure per-insert latency of PrivateKV over an S3-backed Path ORAM
    # and record one CSV row per insert.
    ap = ArgumentParser(description="runs the PrivateKV S3 insertion performance test")
    ap.add_argument("-c", default=0, type=int, help="cached levels")
    ap.add_argument("-b", default="test", help="the S3 bucket to use")
    ap.add_argument("-i", default=2000, type=int, help="number of inserts")
    args = ap.parse_args()

    print("Setting up ORAM")
    # Show the progress bar only during the (slow) one-time ORAM setup.
    pyoram.config.SHOW_PROGRESS_BAR = True
    oram = PathORAM.setup("test", BLOCK_SIZE, BLOCK_COUNT, cached_levels=args.c, storage_type="s3",
                          bucket_name=args.b, ignore_existing=True)
    pyoram.config.SHOW_PROGRESS_BAR = False
    kv = privatekv.store.KVORAM(oram)

    inserts = args.i
    # Fix: use a context manager so the CSV handle is flushed and closed
    # even if an insert raises (the original leaked the handle on error).
    with open("s3_insert_time.csv", "w") as output_file:
        for i in range(inserts):
            key = uuid.uuid4()

            def do_put(key=key):
                # bind key as a default argument to avoid any
                # late-binding surprise if the callable outlives the loop
                kv.put(key, randomString())

            # time exactly one put() per row
            duration = timeit.timeit(do_put, number=1)
            output_file.write("%i,%f\n" % (i + 1, duration))
            print("inserts %d / %d" % (i, inserts))

    oram.close()
# Exemplo n.º 20
# 0
# Reopen a previously-initialized Path ORAM whose encrypted blocks live on
# an SFTP server.  The encryption key, stash, and position map were saved
# locally (one file each, named after the store) by the companion setup
# script.
keyfile_name = "%s.key" % (args.storename)
stashfile_name = "%s.stash" % (args.storename)
positionfile_name = "%s.position" % (args.storename)

# Load the client-side ORAM secrets/state from disk.
# NOTE(review): pickle.load executes arbitrary code if these files are
# untrusted -- acceptable here only because they are local artifacts
# written by our own setup script.
with open(keyfile_name, "rb") as keyfile:
    key = keyfile.read()
with open(stashfile_name, "rb") as stashfile:
    stash = pickle.load(stashfile)
with open(positionfile_name, "rb") as positionfile:
    position_map = pickle.load(positionfile)

# Open an SSH connection authenticated with an RSA private key; the
# PathORAM 'sftp' backend reuses this client for block transfers.
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
rsa_key = paramiko.RSAKey.from_private_key_file(args.key)

ssh.connect(args.host, port=args.port, username=args.user, pkey=rsa_key)
sftp = ssh.open_sftp()

# Touch the store with a single read to verify it is usable end-to-end.
with PathORAM(args.storename,
              stash,
              position_map,
              key=key,
              storage_type="sftp",
              cached_levels=6,
              concurrency_level=1,
              sshclient=ssh) as oram:
    oram.read_block(0)

sftp.close()
ssh.close()
# Exemplo n.º 21
# 0
    return ''.join(random.choice(letters) for i in range(stringLength))


if __name__ == "__main__":
    ap = ArgumentParser(
        description="runs the PrivateKV local insert time test")
    ap.add_argument("-n",
                    default=2**15,
                    type=int,
                    help="number of hash buckets")
    args = ap.parse_args()

    print("Setting up ORAM")
    oram = PathORAM.setup("test",
                          BLOCK_SIZE,
                          args.n,
                          storage_type="ram",
                          ignore_existing=True,
                          cached_levels=0)
    kv = privatekv.store.KVORAM(oram)
    output_file = open("insert_time_blocks_%d.csv" % (args.n), "w")

    inserts = 1000000
    for i in range(inserts):
        key = uuid.uuid4()

        def do_put():
            kv.put(key, randomString())

        duration = timeit.timeit(do_put, number=1)
        output_file.write("%i,%f\n" % (i + 1, duration))
        if 0 == i % 1000:
# Exemplo n.º 22
# 0
# Initialize a Path ORAM store in a local temporary file and persist the
# client-side secrets (key, stash, position map) next to the store name,
# ready to be pushed to the SFTP server.
ap = ArgumentParser(
    description="initialize a Path ORAM and store it on a SFTP server")
ap.add_argument("storename", help="the store name")
ap.add_argument("host", help="the host name")
ap.add_argument("user", help="the user name")
ap.add_argument("key", help="RSA private key file")
# Fix: parse the port as an integer -- the original left it as a string
# whenever -p/--port was supplied on the command line.
ap.add_argument("--port", "-p", default=22, type=int)
args = ap.parse_args()

print("Building ORAM...")
# Reserve a unique local path; delete=False keeps the name valid after
# the handle closes so PathORAM.setup can write into it.
with NamedTemporaryFile(delete=False) as tmp:
    tmpname = tmp.name

oram = PathORAM.setup(tmpname,
                      BLOCK_SIZE,
                      BLOCK_COUNT,
                      storage_type="file",
                      ignore_existing=True)
oram.close()

keyfile_name = "%s.key" % (args.storename)
stashfile_name = "%s.stash" % (args.storename)
positionfile_name = "%s.position" % (args.storename)

# Persist the secrets needed to reopen the store later.
with open(keyfile_name, "wb") as keyfile:
    keyfile.write(oram.key)
with open(stashfile_name, "wb") as stashfile:
    pickle.dump(oram.stash, stashfile)
with open(positionfile_name, "wb") as positionfile:
    pickle.dump(oram.position_map, positionfile)
# Exemplo n.º 23
# 0
    def test_read_block(self):
        """read_block must round-trip every block, repeatedly and in both
        directions, and the tree-ORAM helper must evict greedily."""
        with PathORAM(self._testfname,
                      self._stash,
                      self._position_map,
                      key=self._key,
                      storage_type=self._type_name,
                      **self._kwds) as f:
            # Two forward sweeps then two backward sweeps: every access
            # remaps the block to a new random leaf, so re-reads verify
            # that remapping preserves the plaintext.
            for i, data in enumerate(self._blocks):
                self.assertEqual(list(bytearray(f.read_block(i))),
                                 list(self._blocks[i]))
            for i, data in enumerate(self._blocks):
                self.assertEqual(list(bytearray(f.read_block(i))),
                                 list(self._blocks[i]))
            for i, data in reversed(list(enumerate(self._blocks))):
                self.assertEqual(list(bytearray(f.read_block(i))),
                                 list(self._blocks[i]))
            for i, data in reversed(list(enumerate(self._blocks))):
                self.assertEqual(list(bytearray(f.read_block(i))),
                                 list(self._blocks[i]))
        # Reopen to confirm the first and last blocks are still readable
        # after a close/open cycle (stash and position map are reused).
        with PathORAM(self._testfname,
                      self._stash,
                      self._position_map,
                      key=self._key,
                      storage_type=self._type_name,
                      **self._kwds) as f:
            self.assertEqual(list(bytearray(f.read_block(0))),
                             list(self._blocks[0]))
            self.assertEqual(list(bytearray(f.read_block(self._block_count-1))),
                             list(self._blocks[-1]))

        # test eviction behavior of the tree oram helper
        with PathORAM(self._testfname,
                      self._stash,
                      self._position_map,
                      key=self._key,
                      storage_type=self._type_name,
                      **self._kwds) as f:
            oram = f._oram
            vheap = oram.storage_heap.virtual_heap
            # Z = blocks per bucket; the bucket at path level L occupies
            # slots [L*Z, (L+1)*Z) of the currently loaded path.
            Z = vheap.blocks_per_bucket
            def _has_vacancies(level):
                # True if the bucket at this level of the loaded path
                # still has at least one empty slot.
                return any(oram.path_block_ids[i] == oram.empty_block_id
                           for i in range(level*Z, (level+1)*Z))

            for i in range(len(f.position_map)):
                # Manually perform one ORAM access for block i: remap it
                # to a random leaf, load its old path, and pull it into
                # the stash (mirrors what read_block does internally).
                b = f.position_map[i]
                f.position_map[i] = vheap.random_leaf_bucket()
                oram.load_path(b)
                block = oram.extract_block_from_path(i)
                if block is not None:
                    oram.stash[i] = block

                # track where everyone should be able to move
                # to, unless the bucket fills up
                eviction_levels = {}
                for id_, level in zip(oram.path_block_ids,
                                      oram.path_block_eviction_levels):
                    eviction_levels[id_] = level
                for id_ in oram.stash:
                    block_id, block_addr = \
                        oram.get_block_info(oram.stash[id_])
                    assert block_id == id_
                    # deepest level shared by the loaded path (leaf b)
                    # and the block's newly assigned leaf
                    eviction_levels[id_] = \
                        vheap.clib.calculate_last_common_level(
                            vheap.k, b, block_addr)

                oram.push_down_path()
                oram.fill_path_from_stash()
                oram.evict_path()

                # check that everyone was pushed down greedily
                oram.load_path(b)
                for pos, id_ in enumerate(oram.path_block_ids):
                    current_level = pos // Z
                    if (id_ != oram.empty_block_id):
                        eviction_level = eviction_levels[id_]
                        # never placed deeper than its allowed level...
                        self.assertEqual(current_level <= eviction_level, True)
                        if current_level < eviction_level:
                            # ...and only placed shallower when the
                            # deepest allowed bucket was already full
                            self.assertEqual(_has_vacancies(eviction_level), False)
                for id_ in oram.stash:
                    # anything left in the stash must have had no room
                    # at its deepest allowed level
                    self.assertEqual(
                        _has_vacancies(eviction_levels[id_]), False)
# Exemplo n.º 24
# 0
 def test_setup_fails(self):
     """setup() must reject invalid arguments with the expected exception
     type and must never leave a storage file behind when it fails."""
     # target file already exists (default ignore_existing) -> IOError
     self.assertEqual(os.path.exists(self._dummy_name), False)
     with self.assertRaises(IOError):
         PathORAM.setup(
             os.path.join(thisdir,
                          "baselines",
                          "exists.empty"),
             block_size=10,
             block_count=10,
             bucket_capacity=self._bucket_capacity,
             heap_base=self._heap_base,
             key=self._test_key,
             key_size=self._test_key_size,
             aes_mode=self._aes_mode,
             storage_type=self._type_name,
             **self._kwds)
     # same, with ignore_existing given explicitly as False
     self.assertEqual(os.path.exists(self._dummy_name), False)
     with self.assertRaises(IOError):
         PathORAM.setup(
             os.path.join(thisdir,
                          "baselines",
                          "exists.empty"),
             block_size=10,
             block_count=10,
             bucket_capacity=self._bucket_capacity,
             heap_base=self._heap_base,
             key=self._test_key,
             key_size=self._test_key_size,
             storage_type=self._type_name,
             aes_mode=self._aes_mode,
             ignore_existing=False,
             **self._kwds)
     # block_count must be positive
     self.assertEqual(os.path.exists(self._dummy_name), False)
     with self.assertRaises(ValueError):
         PathORAM.setup(
             self._dummy_name,
             block_size=1,
             block_count=0,
             bucket_capacity=self._bucket_capacity,
             heap_base=self._heap_base,
             key=self._test_key,
             key_size=self._test_key_size,
             aes_mode=self._aes_mode,
             storage_type=self._type_name,
             **self._kwds)
     # block_size must be positive
     self.assertEqual(os.path.exists(self._dummy_name), False)
     with self.assertRaises(ValueError):
         PathORAM.setup(
             self._dummy_name,
             block_size=0,
             block_count=1,
             bucket_capacity=self._bucket_capacity,
             heap_base=self._heap_base,
             key=self._test_key,
             key_size=self._test_key_size,
             aes_mode=self._aes_mode,
             storage_type=self._type_name,
             **self._kwds)
     # header_data must be a bytes-like object, not an int
     self.assertEqual(os.path.exists(self._dummy_name), False)
     with self.assertRaises(TypeError):
         PathORAM.setup(
             self._dummy_name,
             block_size=1,
             block_count=1,
             bucket_capacity=self._bucket_capacity,
             heap_base=self._heap_base,
             key=self._test_key,
             key_size=self._test_key_size,
             aes_mode=self._aes_mode,
             storage_type=self._type_name,
             header_data=2,
             **self._kwds)
     # aes_mode=None is rejected
     self.assertEqual(os.path.exists(self._dummy_name), False)
     with self.assertRaises(ValueError):
         PathORAM.setup(
             self._dummy_name,
             block_size=1,
             block_count=1,
             bucket_capacity=self._bucket_capacity,
             heap_base=self._heap_base,
             key=self._test_key,
             key_size=self._test_key_size,
             aes_mode=None,
             storage_type=self._type_name,
             **self._kwds)
     # bucket_capacity must be positive
     self.assertEqual(os.path.exists(self._dummy_name), False)
     with self.assertRaises(ValueError):
         PathORAM.setup(
             self._dummy_name,
             block_size=1,
             block_count=1,
             bucket_capacity=0,
             heap_base=self._heap_base,
             key=self._test_key,
             key_size=self._test_key_size,
             aes_mode=self._aes_mode,
             storage_type=self._type_name,
             **self._kwds)
     # heap_base=1 (degenerate tree) is rejected
     self.assertEqual(os.path.exists(self._dummy_name), False)
     with self.assertRaises(ValueError):
         PathORAM.setup(
             self._dummy_name,
             block_size=1,
             block_count=1,
             bucket_capacity=self._bucket_capacity,
             heap_base=1,
             key=self._test_key,
             key_size=self._test_key_size,
             aes_mode=self._aes_mode,
             storage_type=self._type_name,
             **self._kwds)
     # a negative key_size (with no key supplied) is a ValueError
     self.assertEqual(os.path.exists(self._dummy_name), False)
     with self.assertRaises(ValueError):
         PathORAM.setup(
             self._dummy_name,
             block_size=1,
             block_count=1,
             bucket_capacity=self._bucket_capacity,
             heap_base=self._heap_base,
             key_size=-1,
             aes_mode=self._aes_mode,
             storage_type=self._type_name,
             **self._kwds)
     # a key that is not a bytes-like object is a TypeError
     self.assertEqual(os.path.exists(self._dummy_name), False)
     with self.assertRaises(TypeError):
         PathORAM.setup(
             self._dummy_name,
             block_size=1,
             block_count=1,
             bucket_capacity=self._bucket_capacity,
             heap_base=self._heap_base,
             key=-1,
             aes_mode=self._aes_mode,
             storage_type=self._type_name,
             **self._kwds)
     # supplying both an explicit key and a key_size raises ValueError;
     # NOTE(review): the "ambiguous combination" reading is inferred from
     # the exception type -- confirm against PathORAM.setup
     self.assertEqual(os.path.exists(self._dummy_name), False)
     with self.assertRaises(ValueError):
         PathORAM.setup(
             self._dummy_name,
             block_size=1,
             block_count=1,
             bucket_capacity=self._bucket_capacity,
             heap_base=self._heap_base,
             key=AES.KeyGen(AES.key_sizes[-1]),
             key_size=AES.key_sizes[-1],
             aes_mode=self._aes_mode,
             storage_type=self._type_name,
             **self._kwds)
     # a key whose length is not a supported AES key size is rejected
     self.assertEqual(os.path.exists(self._dummy_name), False)
     with self.assertRaises(ValueError):
         PathORAM.setup(
             self._dummy_name,
             block_size=1,
             block_count=1,
             bucket_capacity=self._bucket_capacity,
             heap_base=self._heap_base,
             key=os.urandom(AES.key_sizes[-1]+100),
             aes_mode=self._aes_mode,
             storage_type=self._type_name,
             **self._kwds)
     # heap_height combined with block_count raises ValueError;
     # NOTE(review): presumably the two are mutually exclusive -- confirm
     with self.assertRaises(ValueError):
         PathORAM.setup(
             self._dummy_name,
             block_size=1,
             block_count=1,
             heap_height=1,
             bucket_capacity=self._bucket_capacity,
             heap_base=self._heap_base,
             key=self._key,
             aes_mode=self._aes_mode,
             storage_type=self._type_name,
             **self._kwds)
# Exemplo n.º 25
# 0
 def test_setup_withdata(self):
     """Setting up a store with header_data embeds the header in the
     storage file, grows the file accordingly, and the header survives a
     close/reopen cycle."""
     # unique per-test storage file under this directory
     path = os.path.join(
         thisdir, ".".join(self.id().split(".")[1:]) + ".bin")
     if os.path.exists(path):
         os.remove(path)                            # pragma: no cover
     block_size = 10
     block_count = 11
     header = bytes(bytearray([0,1,2]))
     fsetup = PathORAM.setup(
         path,
         block_size=block_size,
         block_count=block_count,
         bucket_capacity=self._bucket_capacity,
         heap_base=self._heap_base,
         key=self._test_key,
         key_size=self._test_key_size,
         aes_mode=self._aes_mode,
         storage_type=self._type_name,
         header_data=header,
         **self._kwds)
     fsetup.close()
     # setup must have produced the raw storage class for this backend
     self.assertIs(type(fsetup.raw_storage),
                   BlockStorageTypeFactory(self._type_name))
     with open(path, 'rb') as f:
         actual_size = len(f.read())
         # predicted sizes: with the header, without it, and the raw
         # payload (header/metadata excluded)
         size_with_header = PathORAM.compute_storage_size(
             block_size,
             block_count,
             bucket_capacity=self._bucket_capacity,
             heap_base=self._heap_base,
             aes_mode=self._aes_mode,
             storage_type=self._type_name,
             header_data=header)
         size_without_header = PathORAM.compute_storage_size(
             block_size,
             block_count,
             bucket_capacity=self._bucket_capacity,
             heap_base=self._heap_base,
             aes_mode=self._aes_mode,
             storage_type=self._type_name)
         payload_size = PathORAM.compute_storage_size(
             block_size,
             block_count,
             bucket_capacity=self._bucket_capacity,
             heap_base=self._heap_base,
             aes_mode=self._aes_mode,
             storage_type=self._type_name,
             header_data=header,
             ignore_header=True)
         # the file is exactly the predicted size, the header makes it
         # strictly larger, and the payload alone is strictly smaller
         self.assertEqual(actual_size, size_with_header)
         self.assertTrue(len(header) > 0)
         self.assertLess(size_without_header, size_with_header)
         self.assertLess(payload_size, actual_size)
     with PathORAM(path,
                   fsetup.stash,
                   fsetup.position_map,
                   key=fsetup.key,
                   storage_type=self._type_name,
                   **self._kwds) as f:
         # header and core parameters are identical on both the setup
         # handle and the reopened instance
         for handle in (f, fsetup):
             self.assertEqual(handle.header_data, header)
             self.assertEqual(handle.block_size, block_size)
             self.assertEqual(handle.block_count, block_count)
             self.assertEqual(handle.storage_name, path)
         self.assertEqual(f.key, fsetup.key)
     os.remove(path)
# Exemplo n.º 26
# 0
def main():
    """Benchmark Path ORAM block reads over an SFTP storage backend.

    Relies on module-level settings (storage_name, block_size,
    block_count, ssh_host, ssh_username, ssh_password) defined elsewhere
    in this file.
    """
    print("Storage Name: %s" % (storage_name))
    print("Block Count: %s" % (block_count))
    print("Block Size: %s" % (MemorySize(block_size)))
    print("Total Memory: %s"
          % (MemorySize(block_size*block_count)))
    print("Actual Storage Required: %s"
          % (MemorySize(
              PathORAM.compute_storage_size(
                  block_size,
                  block_count,
                  storage_type='sftp'))))
    print("")

    # Start an SSH client using paramiko
    print("Starting SSH Client")
    with paramiko.SSHClient() as ssh:
        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        ssh.load_system_host_keys()
        ssh.connect(ssh_host,
                    username=ssh_username,
                    password=ssh_password)

        print("Setting Up Path ORAM Storage")
        setup_start = time.time()
        with PathORAM.setup(storage_name,
                            block_size,
                            block_count,
                            storage_type='sftp',
                            sshclient=ssh,
                            ignore_existing=True) as f:
            print("Total Setup Time: %.2f s"
                  % (time.time()-setup_start))
            print("Current Stash Size: %s"
                  % len(f.stash))
            print("Total Data Transmission: %s"
                  % (MemorySize(f.bytes_sent + f.bytes_received)))
            print("")

        # We close the device and reopen it after
        # setup to reset the bytes sent and bytes
        # received stats.
        with PathORAM(storage_name,
                      f.stash,
                      f.position_map,
                      key=f.key,
                      storage_type='sftp',
                      sshclient=ssh) as f:

            test_count = 100
            start_time = time.time()
            for t in tqdm.tqdm(list(range(test_count)),
                               desc="Running I/O Performance Test"):
                # uniform random reads over the whole address space
                f.read_block(random.randint(0,f.block_count-1))
            stop_time = time.time()
            print("Current Stash Size: %s"
                  % len(f.stash))
            # average bytes moved per access, and that as a multiple of
            # the logical block size (the ORAM bandwidth overhead)
            print("Access Block Avg. Data Transmitted: %s (%.3fx)"
                  % (MemorySize((f.bytes_sent + f.bytes_received)/float(test_count)),
                     (f.bytes_sent + f.bytes_received)/float(test_count)/float(block_size)))
            print("Fetch Block Avg. Latency: %.2f ms"
                  % ((stop_time-start_time)/float(test_count)*1000))
            print("")
# Exemplo n.º 27
# 0
 def test_update_header_data(self):
     """update_header_data() overwrites the stored header in place.

     The replacement must be exactly the same length as the original
     header; shorter or longer payloads raise ValueError and leave the
     stored header unchanged.
     """
     fname = ".".join(self.id().split(".")[1:])
     fname += ".bin"
     fname = os.path.join(thisdir, fname)
     if os.path.exists(fname):
         os.remove(fname)                           # pragma: no cover
     bsize = 10
     bcount = 11
     header_data = bytes(bytearray([0,1,2]))
     fsetup = PathORAM.setup(
         fname,
         block_size=bsize,
         block_count=bcount,
         bucket_capacity=self._bucket_capacity,
         heap_base=self._heap_base,
         key=self._test_key,
         key_size=self._test_key_size,
         # Fix: pass the storage backend under test, as every other
         # setup-based test in this class does; **self._kwds carries
         # backend-specific options that assume this storage type.
         storage_type=self._type_name,
         header_data=header_data,
         **self._kwds)
     fsetup.close()
     new_header_data = bytes(bytearray([1,1,1]))
     # replace the header and check the change is visible immediately
     with PathORAM(fname,
                   fsetup.stash,
                   fsetup.position_map,
                   key=fsetup.key,
                   storage_type=self._type_name,
                   **self._kwds) as f:
         self.assertEqual(f.header_data, header_data)
         f.update_header_data(new_header_data)
         self.assertEqual(f.header_data, new_header_data)
     # ...and that it persists across a close/reopen cycle
     with PathORAM(fname,
                   fsetup.stash,
                   fsetup.position_map,
                   key=fsetup.key,
                   storage_type=self._type_name,
                   **self._kwds) as f:
         self.assertEqual(f.header_data, new_header_data)
     # wrong-sized replacements (too short, too long) are rejected
     with self.assertRaises(ValueError):
         with PathORAM(fname,
                       fsetup.stash,
                       fsetup.position_map,
                       key=fsetup.key,
                       storage_type=self._type_name,
                       **self._kwds) as f:
             f.update_header_data(bytes(bytearray([1,1])))
     with self.assertRaises(ValueError):
         with PathORAM(fname,
                       fsetup.stash,
                       fsetup.position_map,
                       key=fsetup.key,
                       storage_type=self._type_name,
                       **self._kwds) as f:
             f.update_header_data(bytes(bytearray([1,1,1,1])))
     # failed updates must not have corrupted the stored header
     with PathORAM(fname,
                   fsetup.stash,
                   fsetup.position_map,
                   key=fsetup.key,
                   storage_type=self._type_name,
                   **self._kwds) as f:
         self.assertEqual(f.header_data, new_header_data)
     os.remove(fname)
    return ''.join(random.choice(letters) for i in range(stringLength))


if __name__ == "__main__":
    # Load-test PrivateKV bucket occupancy: hammer a RAM-backed Path ORAM
    # with inserts over a small key space, then compute per-level loads.
    parser = ArgumentParser(description="runs the PrivateKV bucket load test")
    parser.add_argument("-z",
                        default=4,
                        type=int,
                        help="number of blocks in a bucket")
    options = parser.parse_args()

    print("Setting up ORAM")
    oram = PathORAM.setup("test",
                          BLOCK_SIZE,
                          BLOCK_COUNT,
                          storage_type="ram",
                          ignore_existing=True,
                          cached_levels=0,
                          bucket_capacity=options.z)
    kv = privatekv.store.KVORAM(oram)

    total_inserts = 1000000
    for count in range(total_inserts):
        # keys are drawn from a small range, so puts frequently overwrite
        kv.put(random.randint(0, 100000), randomString())

        if count % 1000 == 0:
            print("inserts %d / %d" % (count, total_inserts))

    print("computing level loads")
    loads = privatekv.utils.compute_avg_level_load(oram)
# Exemplo n.º 29
# 0
 def test_locked_flag(self):
     """While one PathORAM instance holds the storage lock, additional
     opens must fail with IOError unless ignore_lock=True is passed."""
     def _open(**extra):
         # open the shared test store, layering any extra keyword
         # arguments on top of the backend-specific defaults
         kwds = dict(self._kwds)
         kwds.update(extra)
         return PathORAM(self._testfname,
                         self._stash,
                         self._position_map,
                         key=self._key,
                         storage_type=self._type_name,
                         **kwds)

     with _open() as f:
         # a second concurrent open is rejected (twice, to show a
         # failed attempt does not clear the lock)
         with self.assertRaises(IOError):
             with _open() as f1:
                 pass                               # pragma: no cover
         with self.assertRaises(IOError):
             with _open() as f1:
                 pass                               # pragma: no cover
         # ignore_lock bypasses the check
         with _open(ignore_lock=True) as f1:
             pass
         # ...but the lock itself is still held afterwards
         with self.assertRaises(IOError):
             with _open() as f1:
                 pass                               # pragma: no cover
         # bypassing is repeatable
         with _open(ignore_lock=True) as f1:
             pass
         with _open(ignore_lock=True) as f1:
             pass
     # once the first instance closes, a plain open succeeds again
     with _open() as f:
         pass