def test_write_many(self):
    """ Can batch write arbitrary numbers of items """
    hash_key = DynamoKey('id', data_type=STRING)
    self.dynamo.create_table('foobar', hash_key=hash_key)
    # Write 50 items in one batch context; the batch writer is expected
    # to page the requests under the hood.
    with self.dynamo.batch_write('foobar') as batch:
        for idx in _xrange(50):
            batch.put({'id': str(idx)})
    self.assertEqual(self.dynamo.scan('foobar', count=True), 50)
    # Now batch-delete the same 50 keys and confirm the table is empty.
    with self.dynamo.batch_write('foobar') as batch:
        for idx in _xrange(50):
            batch.delete({'id': str(idx)})
    self.assertEqual(self.dynamo.scan('foobar', count=True), 0)
def test_count(self):
    """ Can count items instead of returning the actual items """
    self.make_table()
    with self.dynamo.batch_write('foobar') as batch:
        for value in _xrange(3):
            batch.put({'id': 'a', 'num': value})
    # count=True should return an integer, not the items themselves.
    result = self.dynamo.query('foobar', count=True, id__eq='a')
    self.assertEqual(result, 3)
def test_limit(self):
    """ Can limit the number of query results """
    self.make_table()
    with self.dynamo.batch_write('foobar') as batch:
        for value in _xrange(3):
            batch.put({'id': 'a', 'num': value})
    # Three items exist, but limit=1 should cap the result set.
    results = list(self.dynamo.query('foobar', id__eq='a', limit=1))
    self.assertEqual(len(results), 1)
def test_count(self):
    """ Can count items instead of returning the actual items """
    self.make_table()
    with self.dynamo.batch_write('foobar') as batch:
        for idx in _xrange(3):
            batch.put({'id': str(idx)})
    # A counting scan should return an integer total, not items.
    total = self.dynamo.scan('foobar', count=True)
    self.assertEqual(total, 3)
def test_limit(self):
    """ Can limit the number of scan results """
    self.make_table()
    with self.dynamo.batch_write('foobar') as batch:
        for idx in _xrange(3):
            batch.put({'id': str(idx)})
    # Three items exist, but limit=1 should cap the scan result set.
    results = list(self.dynamo.scan('foobar', limit=1))
    self.assertEqual(len(results), 1)
def test_order_desc(self):
    """ Can sort the results in descending order """
    self.make_table()
    with self.dynamo.batch_write('foobar') as batch:
        for value in _xrange(3):
            batch.put({'id': 'a', 'num': value})
    results = self.dynamo.query('foobar', attributes=['num'],
                                id__eq='a', desc=True)
    # Items were inserted as num=0,1,2; desc=True must yield 2,1,0.
    expected = [{'num': value} for value in range(2, -1, -1)]
    self.assertEqual(list(results), expected)
def test_get_many(self):
    """ Can get many items via paging """
    self.make_table()
    keys = [{'id': str(i)} for i in _xrange(50)]
    with self.dynamo.batch_write('foobar') as batch:
        for key in keys:
            batch.put(key)
    ret = list(self.dynamo.batch_get('foobar', keys))
    # assertItemsEqual exists only on Python 2's TestCase; it was renamed
    # assertCountEqual in Python 3.  This file targets both (it uses
    # six.moves.xrange), so resolve whichever is available at runtime.
    # NOTE: the default of getattr must not touch self.assertCountEqual
    # eagerly, or Python 2 would raise AttributeError.
    compare = getattr(self, 'assertItemsEqual', None)
    if compare is None:  # Python 3
        compare = self.assertCountEqual
    compare(ret, keys)
def test_create_auto_validation_set_string_session_id(self):
    """ Auto validation split works when session ids are opaque strings """
    from six.moves import xrange as _xrange

    num_sessions = tc.activity_classifier.util._MIN_NUM_SESSIONS_FOR_SPLIT * 4
    _load_data(self, num_examples=10000, max_num_sessions=num_sessions,
               randomize_num_sessions=False, enforce_all_sessions=True)
    # Remap every integer session id to a random 6-char uppercase string.
    session_ids_dict = {idx: uuid.uuid4().hex[:6].upper()
                        for idx in _xrange(num_sessions)}
    self.data[self.session_id] = self.data[self.session_id].apply(
        lambda key: session_ids_dict[key])
    self._create_auto_validation_set()
def save_to_files(self, fragsdir, public_metafile, private_metafile):
    """Flush in-memory fragments to disk and save both metadata files.

    Each fragment that needs saving (still held as a _BytesIO buffer) is
    written into ``fragsdir`` (created if missing) and then replaced in
    ``self._fragments`` by the path of the file it was written to.

    Args:
        fragsdir: Directory that receives the fragment files.
        public_metafile: Destination path for the public metadata.
        private_metafile: Destination path for the private metadata.
    """
    if not _os.path.exists(fragsdir):
        _os.makedirs(fragsdir)
    # Write only the fragment ids marked as changed; if _changed is falsy
    # (e.g. an empty set), fall back to writing every fragment.
    # NOTE(review): presumably an empty change-set means "unknown, save
    # all" rather than "save nothing" — confirm against callers.
    fragids = self._changed or _xrange(len(self._fragments))
    # Zero-padded filename template sized to the largest index,
    # e.g. 120 fragments -> "frag_%03d.dat".
    name = "frag_%%0%dd.dat" % len(str(len(self._fragments)))
    for fragid in fragids:
        fragment = self._fragments[fragid]
        assert isinstance(fragment, _BytesIO)
        fragment.seek(0)  # rewind so the whole buffer is copied
        destination = _os.path.join(fragsdir, name % fragid)
        with open(destination, "wb") as fp:
            _shutil.copyfileobj(fragment, fp)
        fragment.close()
        # Replace the closed buffer with the on-disk path.
        self._fragments[fragid] = destination
    self._metadata.save_to_file(public_metafile, private=False)
    self._metadata.save_to_file(private_metafile, private=True)
def _get_next_keys(self):
    """ Get the next page of keys to fetch """
    encoded = []
    try:
        # Prefer the caller-supplied key iterator, encoding up to one
        # page worth of keys.
        while len(encoded) < self.page_size:
            raw_key = six.next(self.key_iter)
            encoded.append(self.connection.dynamizer.encode_keys(raw_key))
    except StopIteration:
        # Iterator exhausted.  If we got a partial page, return it as-is;
        # only when it is empty do we fall back to the unprocessed keys.
        if not encoded:
            if not self.unprocessed_keys:
                # Both sources are empty: signal end of iteration.
                raise StopIteration
            encoded = self.unprocessed_keys[:self.page_size]
            self.unprocessed_keys = self.unprocessed_keys[self.page_size:]
    return encoded
def mix_and_slice(data, key, iv, threads=None, to_string=True):
    """Perform the whole Mix&Slice encryption (mixing and slicing phases).

    Args:
        data (bytestr): The data to encrypt. Must be a multiple of
            MACRO_SIZE.
        key (bytestr): The key used for AES encryption. Must be 16 bytes
            long.
        iv (bytestr): The iv used for AES encryption. Must be 16 bytes
            long.
        threads (int): The number of threads used. (default: cpu count).
        to_string (bool): returns a bytestr if true, ffi.buffer otherwise.

    Returns:
        A list of encrypted fragments, as zero-copy memoryview slices of
        the mixed output (one slice per fragment).
    """
    fragdata = _mixprocess(data, key, iv, _lib.mixslice, to_string, threads)
    fragview = memoryview(fragdata)
    size = len(data)
    # Split the mixed output into MINI_PER_MACRO equally-sized fragments.
    fragsize = size // _lib.MINI_PER_MACRO
    return [fragview[off:off + fragsize] for off in _xrange(0, size, fragsize)]
def _add_widgets(self):
    """ Add a bunch of widgets with different alpha/beta values """
    for alpha in _xrange(10):
        widget = Widget('a', str(alpha), alpha=alpha)
        # beta cycles through 5..9 then 0..4
        widget.beta = (alpha + 5) % 10
        self.engine.save(widget)
def __init__(self, fragments, metadata, changed=None):
    """Store fragments and metadata, tracking which fragment ids changed.

    When ``changed`` is None, every fragment index is considered changed.
    """
    self._fragments = fragments
    self._metadata = metadata
    if changed is None:
        # No explicit change list: mark every fragment as changed.
        changed = _xrange(len(self._fragments))
    self._changed = set(changed)