def writeData(self, startOffset, dataSlice):
    """Splice *dataSlice* into this item's stored data at *startOffset*.

    When the item has no ``data`` attribute yet, the slice becomes the
    whole payload; otherwise the bytes in the span
    [startOffset, startOffset + len(dataSlice)) are overwritten in place.
    """
    if "data" not in self.item:
        self.log.debug("write block %s has NO data", self.path)
        self.item['data'] = Binary(dataSlice)
        return
    self.log.debug("write block %s has data", self.path)
    existing = self.item["data"].value
    head = existing[0:startOffset]
    tail = existing[startOffset + len(dataSlice):]
    self.item['data'] = Binary(head + dataSlice + tail)
def _handle_base64_item(self, operation, parent, key, value):
    """Convert an API-supplied base64 payload into a ``Binary`` attribute.

    Only the 'put' operation does any work: it stores ``value['$data']``
    under *key* on *parent*.  'get' and 'delete' are deliberate no-ops.

    :raises ValueError: if *value* carries keys beyond $data/$base64.
    """
    if operation != 'put':
        # 'get' and 'delete' require no transformation.
        return
    # Validate with a real exception instead of `assert`, which is
    # silently stripped when Python runs with -O.
    if len(value) != 2:
        raise ValueError(
            'only $data and $base64 should be set on binary item')
    binary_data = Binary('')
    # base64 comes in from API, so set directly (minor hack)
    binary_data.value = value['$data']
    parent[key] = binary_data
def truncate(self, length, fh=None):
    """Truncate this file to *length* bytes.

    Deletes every block strictly past the new last block, trims the last
    block's data to the remaining byte count, then updates st_size and
    bumps st_ctime/st_mtime on the first (metadata) block.
    """
    with self.writeLock():
        # Floor division (`//` is explicit and Python 3 safe; plain `/`
        # would become float division under Python 3).
        lastBlock = length // self.accessor.BLOCK_SIZE
        l_time = int(time())
        items = self.accessor.blockTablev2.query(
            blockId__eq=self.record["blockId"],
            blockNum__gt=lastBlock,
            attributes=["blockId", "blockNum"])
        for entry in items:
            entry.delete()
        if length:
            try:
                lastItem = self.getBlock(lastBlock, getData=True)
                if lastItem is not None and "data" in lastItem:
                    lastItem['data'] = Binary(
                        lastItem['data'].value[
                            0:(length % self.accessor.BLOCK_SIZE)])
                    lastItem.save()
            # `except X as e` replaces the Python-2-only `except X, e`.
            except FuseOSError as fe:
                # Block is missing - so nothing to update
                if fe.errno != ENOENT:
                    # Bare raise preserves the original traceback,
                    # unlike `raise fe`.
                    raise
        item = self.getFirstBlock()
        item['st_size'] = length
        item['st_ctime'] = max(l_time, item['st_ctime'])
        item['st_mtime'] = max(l_time, item['st_mtime'])
        item.save()
def to_dynamo(self, data):
    """Serialize a face-signature record for DynamoDB storage.

    Packs ``data['signature']`` (a sequence matching STRUCT_FORMAT) into
    a Binary blob.

    :raises FaceSignatureClientError: if the signature cannot be packed.
    """
    signature = data['signature']
    try:
        signature = Binary(struct.pack(self.STRUCT_FORMAT, *signature))
    except (struct.error, TypeError):
        # Narrowed from bare `except Exception`: struct.pack signals a
        # bad value with struct.error and a wrong element count/type
        # with TypeError; anything else is a programming error that
        # should propagate unmasked.
        raise FaceSignatureClientError(
            "Malformed signature {}".format(signature))
    return {
        'box_id': data['box_id'],
        'signature': signature,
    }
def write(self, data, offset):
    """Write *data* at byte *offset*, spanning as many blocks as needed.

    Each affected block is fetched (or created, except block 0 which must
    already exist to keep link count and times), spliced, and saved.
    """
    # `//` keeps these as floor divisions under Python 3 as well.
    startBlock = offset // BLOCK_SIZE
    endBlock = (offset + len(data) - 1) // BLOCK_SIZE
    # Number of bytes that fit in the (possibly partial) first block.
    initialBlockOffset = BLOCK_SIZE - (offset % BLOCK_SIZE)
    blockOffset = 0
    self.log.debug("write start=%d, last=%d, initial offset %d",
                   startBlock, endBlock, initialBlockOffset)
    for block in range(startBlock, endBlock + 1):
        item = self.accessor.getItemOrNone(
            os.path.join(self.path, str(block)), attrs=["data"])
        if item is None:
            self.log.debug("write block %d is None", block)
            if not block:
                # First block must keep the link count and times.
                # Raise a real exception object: raising a plain string
                # is itself a TypeError on Python >= 2.6.
                raise RuntimeError(
                    "First block cannot be empty for " + self.path)
            item = self.accessor.newItem(attrs={
                "path": self.path,
                "name": str(block)
            })
        dataSlice = data[0:initialBlockOffset] if block == startBlock else \
            data[blockOffset: blockOffset + BLOCK_SIZE]
        self.log.debug("write block %d slice length %d from offset %d",
                       block, len(dataSlice), blockOffset)
        blockOffset += len(dataSlice)
        if "data" in item:
            self.log.debug("write block %d has data", block)
            itemData = item["data"].value
            # Offset inside this block where the slice lands.
            startOffset = (offset % BLOCK_SIZE) if block == startBlock else 0
            item['data'] = Binary(itemData[0:startOffset] + dataSlice +
                                  itemData[startOffset + len(dataSlice):])
        else:
            self.log.debug("write block %d has NO data", block)
            item['data'] = Binary(dataSlice)
        item.save()
def truncate(self, path, length, fh=None):
    """Shrink the file at *path* to *length* bytes (v1 table layout)."""
    self.log.debug("truncate(%s, %d)", path, length)
    lastBlock = length / BLOCK_SIZE
    # With length == 0 the range condition is dropped, so every block
    # under this hash key is queried for deletion.
    condition = GT(str(lastBlock)) if length else None
    doomed = self.table.query(
        hash_key=path,
        range_key_condition=condition,
        attributes_to_get=['key', "range"])
    # TODO Pagination
    for victim in doomed:
        victim.delete()
    if length:
        # Trim the surviving last block to the remaining byte count.
        tail = self.getItemOrNone(os.path.join(path, str(lastBlock)),
                                  attrs=["data"])
        if tail is not None and "data" in tail:
            tail['data'] = Binary(
                tail['data'].value[0:(length % BLOCK_SIZE)])
            tail.save()
    meta = self.getItemOrThrow(path, attrs=['st_size'])
    meta['st_size'] = length
    meta.save()
def test_binary_attrs(self):
    """Round-trip an item whose attributes include Binary scalars and
    sets of Binary values, and verify equality after retrieval."""
    c = self.dynamodb
    schema = c.create_schema(self.hash_key_name, self.hash_key_proto_value,
                             self.range_key_name,
                             self.range_key_proto_value)
    table_name = 'test-%d' % int(time.time())
    table = self.create_table(table_name, schema, 5, 5)
    table.refresh(wait_for_active=True)
    hash_value = 'Amazon S3'
    range_value = 'S3 Thread 1'
    attrs = {
        'Message': 'S3 Thread 1 message text',
        'LastPostedBy': 'User A',
        'Views': 0,
        'Replies': 0,
        'Answered': 0,
        'BinaryData': Binary('\x01\x02\x03\x04'),
        'BinarySequence': set([Binary('\x01\x02'), Binary('\x03\x04')]),
        'Tags': set(['largeobject', 'multipart upload']),
        'LastPostDateTime': '12/9/2011 11:36:03 PM'
    }
    table.new_item(hash_value, range_value, attrs).put()
    fetched = table.get_item(hash_value, range_value,
                             consistent_read=True)
    self.assertEqual(fetched['Message'], 'S3 Thread 1 message text')
    self.assertEqual(fetched['Views'], 0)
    self.assertEqual(fetched['Tags'],
                     set(['largeobject', 'multipart upload']))
    self.assertEqual(fetched['BinaryData'], Binary('\x01\x02\x03\x04'))
    # Also comparable directly to bytes:
    self.assertEqual(fetched['BinaryData'], bytes('\x01\x02\x03\x04'))
    self.assertEqual(fetched['BinarySequence'],
                     set([Binary('\x01\x02'), Binary('\x03\x04')]))
def _create_vector_item(self, vec_id, vector):
    """Build (or fetch) the item holding *vector*'s raw bytes.

    The vector is cast to the store's configured precision and stored as
    a Binary blob together with the precision name needed to decode it.
    """
    item = self._get_or_create_item('vector', vec_id)
    # tobytes() (available since NumPy 1.9) replaces tostring(), which
    # was deprecated in NumPy 1.19 and removed in 2.0; output is
    # byte-for-byte identical.
    item['value'] = Binary(vector.astype(self.precision).tobytes())
    item['precision'] = self.precision_name
    return item