def _query(self, limit=None, index=None, reverse=False, consistent=False,
           exclusive_start_key=None, **filter_kwargs):
    """
    The internal method that performs the actual queries. Used extensively
    by ``ResultSet`` to perform each (paginated) request.

    ``limit`` caps the page size, ``index`` names an optional (secondary)
    index, ``reverse`` asks for descending key order, ``consistent``
    requests a consistent read & ``exclusive_start_key`` is the
    ``last_key`` returned by the previous page. The remaining kwargs are
    the user's filters (e.g. ``username__eq='john'``).

    Returns a dict with ``results`` (a list of ``Item``) and ``last_key``
    (the pagination cursor, or ``None`` once exhausted).
    """
    kwargs = {
        'limit': limit,
        'index_name': index,
        'consistent_read': consistent,
    }

    # DynamoDB's ``ScanIndexForward`` defaults to ascending order, so only
    # send the flag when the caller actually wants descending results.
    # (The old code passed ``reverse`` straight through, which inverted
    # the semantics: ``reverse=False`` produced a descending query.)
    if reverse:
        kwargs['scan_index_forward'] = False

    if exclusive_start_key:
        kwargs['exclusive_start_key'] = {}

        for key, value in exclusive_start_key.items():
            kwargs['exclusive_start_key'][key] = \
                self._dynamizer.encode(value)

    # Convert the filters into something we can actually use.
    kwargs['key_conditions'] = self._build_filters(
        filter_kwargs,
        using=QUERY_OPERATORS
    )

    raw_results = self.connection.query(
        self.table_name,
        **kwargs
    )
    results = []
    last_key = None

    for raw_item in raw_results.get('Items', []):
        item = Item(self)
        item.load({
            'Item': raw_item,
        })
        results.append(item)

    if raw_results.get('LastEvaluatedKey', None):
        last_key = {}

        for key, value in raw_results['LastEvaluatedKey'].items():
            last_key[key] = self._dynamizer.decode(value)

    return {
        'results': results,
        'last_key': last_key,
    }
def _scan(self, limit=None, exclusive_start_key=None, segment=None,
          total_segments=None, attributes=None, **filter_kwargs):
    """
    The internal method that performs the actual scan. Used extensively
    by ``ResultSet`` to perform each (paginated) request.

    ``limit`` caps the page size; ``segment``/``total_segments`` support
    parallel scans; ``exclusive_start_key`` is the ``last_key`` from the
    previous page. ``attributes`` is an optional list of field names to
    fetch (``None``, the default, fetches everything — backward-compatible
    with the previous signature). The remaining kwargs are the user's
    filters.

    Returns a dict with ``results`` (a list of ``Item``) and ``last_key``
    (the pagination cursor, or ``None`` once exhausted).
    """
    kwargs = {
        'limit': limit,
        'segment': segment,
        'total_segments': total_segments,
        # ``None`` means "all attributes" on the wire.
        'attributes_to_get': attributes,
    }

    if exclusive_start_key:
        kwargs['exclusive_start_key'] = {}

        for key, value in exclusive_start_key.items():
            kwargs['exclusive_start_key'][key] = \
                self._dynamizer.encode(value)

    # Convert the filters into something we can actually use.
    kwargs['scan_filter'] = self._build_filters(
        filter_kwargs,
        using=FILTER_OPERATORS
    )

    raw_results = self.connection.scan(
        self.table_name,
        **kwargs
    )
    results = []
    last_key = None

    for raw_item in raw_results.get('Items', []):
        item = Item(self)
        item.load({
            'Item': raw_item,
        })
        results.append(item)

    if raw_results.get('LastEvaluatedKey', None):
        last_key = {}

        for key, value in raw_results['LastEvaluatedKey'].items():
            last_key[key] = self._dynamizer.decode(value)

    return {
        'results': results,
        'last_key': last_key,
    }
def get_item(self, consistent=False, attributes=None, **kwargs):
    """
    Fetches a single item (record) from a DynamoDB table.

    The key of the desired item is supplied via kwargs — one kwarg per
    key attribute (hash key alone, or hash + range key).

    Optionally accepts a ``consistent`` boolean. Passing ``True`` performs
    a consistent (but more expensive) read. (Default: ``False``)

    Optionally accepts an ``attributes`` parameter: a list of field names
    to fetch. (Default: ``None``, meaning all fields)

    Returns an ``Item`` instance holding all the data for that record, or
    raises ``ItemNotFound`` if no such record exists.

    Example::

        # A simple hash key.
        >>> john = users.get_item(username='******')
        >>> john['first_name']
        'John'

        # A complex hash+range key.
        >>> john = users.get_item(username='******', last_name='Doe')
        >>> john['first_name']
        'John'

        # A consistent read (assuming the data might have just changed).
        >>> john = users.get_item(username='******', consistent=True)
        >>> john['first_name']
        'Johann'

        # With a key that is an invalid variable name in Python.
        # Also, assumes a different schema than previous examples.
        >>> john = users.get_item(**{
        ...     'date-joined': 127549192,
        ... })
        >>> john['first_name']
        'John'
    """
    encoded_key = self._encode_keys(kwargs)
    response = self.connection.get_item(
        self.table_name,
        encoded_key,
        attributes_to_get=attributes,
        consistent_read=consistent
    )

    # A hit carries an 'Item' payload; anything else means no such record.
    if 'Item' in response:
        result = Item(self)
        result.load(response)
        return result

    raise exceptions.ItemNotFound("Item %s couldn't be found." % kwargs)
def _query(self, limit=None, index=None, reverse=False, consistent=False,
           exclusive_start_key=None, select=None, attributes_to_get=None,
           **filter_kwargs):
    """
    Performs one (paginated) ``Query`` request against DynamoDB.

    ``ResultSet`` drives this repeatedly, feeding each page's ``last_key``
    back in as ``exclusive_start_key``. Returns a dict of ``results``
    (a list of ``Item``) and ``last_key`` (``None`` once exhausted).
    """
    query_kwargs = {
        'limit': limit,
        'index_name': index,
        'consistent_read': consistent,
        'select': select,
        'attributes_to_get': attributes_to_get,
        # Turn the user-friendly filter kwargs into wire-format conditions.
        'key_conditions': self._build_filters(
            filter_kwargs,
            using=QUERY_OPERATORS
        ),
    }

    # Ascending is DynamoDB's default; only send the flag when reversing.
    if reverse:
        query_kwargs['scan_index_forward'] = False

    if exclusive_start_key:
        query_kwargs['exclusive_start_key'] = dict(
            (name, self._dynamizer.encode(value))
            for name, value in exclusive_start_key.items()
        )

    response = self.connection.query(self.table_name, **query_kwargs)

    page = []

    for raw_item in response.get('Items', []):
        entry = Item(self)
        entry.load({
            'Item': raw_item,
        })
        page.append(entry)

    last_key = None

    if response.get('LastEvaluatedKey', None):
        last_key = dict(
            (name, self._dynamizer.decode(value))
            for name, value in response['LastEvaluatedKey'].items()
        )

    return {
        'results': page,
        'last_key': last_key,
    }
def _batch_get(self, keys, consistent=False):
    """
    The internal method that performs the actual batch get. Used
    extensively by ``BatchGetResultSet`` to perform each (paginated)
    request.

    ``keys`` is a list of key dicts (one entry per desired item);
    ``consistent=True`` requests a consistent read.

    Returns a dict with ``results`` (a list of ``Item``), ``last_key``
    (always ``None`` — see comment below) and ``unprocessed_keys`` (keys
    DynamoDB declined to process this round, decoded back to Python
    values so they can simply be resubmitted).
    """
    items = {
        self.table_name: {
            'Keys': [],
        },
    }

    if consistent:
        items[self.table_name]['ConsistentRead'] = True

    for key_data in keys:
        raw_key = {}

        for key, value in key_data.items():
            raw_key[key] = self._dynamizer.encode(value)

        items[self.table_name]['Keys'].append(raw_key)

    raw_results = self.connection.batch_get_item(request_items=items)
    results = []
    unprocessed_keys = []

    for raw_item in raw_results['Responses'].get(self.table_name, []):
        item = Item(self)
        item.load({
            'Item': raw_item,
        })
        results.append(item)

    # ``UnprocessedKeys`` mirrors the request shape: it is keyed by table
    # name, i.e. ``{table_name: {'Keys': [...]}}``. The old code looked
    # for a top-level 'Keys' entry, so unprocessed keys were always
    # silently dropped.
    raw_unprocessed = raw_results.get('UnprocessedKeys', {})

    for raw_key in raw_unprocessed.get(self.table_name, {}).get('Keys', []):
        py_key = {}

        for key, value in raw_key.items():
            py_key[key] = self._dynamizer.decode(value)

        unprocessed_keys.append(py_key)

    return {
        'results': results,
        # NEVER return a ``last_key``. Just in-case any part of
        # ``ResultSet`` peeks through, since much of the
        # original underlying implementation is based on this key.
        'last_key': None,
        'unprocessed_keys': unprocessed_keys,
    }
def _scan(self, limit=None, exclusive_start_key=None, segment=None,
          total_segments=None, attributes=None, **filter_kwargs):
    """
    Performs one (paginated) ``Scan`` request against DynamoDB.

    ``ResultSet`` drives this repeatedly, feeding each page's ``last_key``
    back in as ``exclusive_start_key``. Returns a dict of ``results``
    (a list of ``Item``) and ``last_key`` (``None`` once exhausted).
    """
    scan_kwargs = {
        'limit': limit,
        'segment': segment,
        'total_segments': total_segments,
        'attributes_to_get': attributes,
        # Turn the user-friendly filter kwargs into wire-format filters.
        'scan_filter': self._build_filters(
            filter_kwargs,
            using=FILTER_OPERATORS
        ),
    }

    if exclusive_start_key:
        encoded_start = {}

        for name, value in exclusive_start_key.items():
            encoded_start[name] = self._dynamizer.encode(value)

        scan_kwargs['exclusive_start_key'] = encoded_start

    response = self.connection.scan(self.table_name, **scan_kwargs)

    page = []

    for raw_item in response.get('Items', []):
        record = Item(self)
        record.load({
            'Item': raw_item,
        })
        page.append(record)

    last_key = None

    if response.get('LastEvaluatedKey', None):
        last_key = dict(
            (name, self._dynamizer.decode(value))
            for name, value in response['LastEvaluatedKey'].items()
        )

    return {
        'results': page,
        'last_key': last_key,
    }
def _scan(self, limit=None, exclusive_start_key=None, segment=None,
          total_segments=None, attributes=None, **filter_kwargs):
    """
    The internal method behind each (paginated) scan request, used
    extensively by ``ResultSet``.

    Returns a dict with ``results`` (list of ``Item``) & ``last_key``
    (the cursor for the next page, or ``None`` when the scan is done).
    """
    def _wrap(raw_item):
        # Wrap one raw wire-format item in an ``Item`` instance.
        wrapped = Item(self)
        wrapped.load({
            'Item': raw_item,
        })
        return wrapped

    request = {
        'limit': limit,
        'segment': segment,
        'total_segments': total_segments,
        'attributes_to_get': attributes,
    }

    if exclusive_start_key:
        request['exclusive_start_key'] = dict(
            (field, self._dynamizer.encode(raw))
            for field, raw in exclusive_start_key.items()
        )

    # Convert the filters into something we can actually use.
    request['scan_filter'] = self._build_filters(
        filter_kwargs,
        using=FILTER_OPERATORS
    )

    response = self.connection.scan(self.table_name, **request)

    results = [_wrap(raw_item) for raw_item in response.get('Items', [])]

    last_key = None

    if response.get('LastEvaluatedKey', None):
        last_key = {}

        for field, raw in response['LastEvaluatedKey'].items():
            last_key[field] = self._dynamizer.decode(raw)

    return {
        'results': results,
        'last_key': last_key,
    }
def flush(self):
    """
    Sends all pending puts & deletes to DynamoDB as one batch write, then
    requeues anything the service reports back as unprocessed.

    Clears ``self._to_put`` / ``self._to_delete`` after sending; any
    ``UnprocessedItems`` in the response are loaded back into those
    queues so a later flush can retry them. Returns ``True``.

    Raises ``Exception`` if an unprocessed entry is neither a
    ``PutRequest`` nor a ``DeleteRequest``.
    """
    table_name = self.table.table_name
    batch_data = {
        table_name: [
            # We'll insert data here shortly.
        ],
    }

    for put in self._to_put:
        item = Item(self.table, data=put)
        batch_data[table_name].append({
            'PutRequest': {
                'Item': item.prepare_full(),
            }
        })

    for delete in self._to_delete:
        batch_data[table_name].append({
            'DeleteRequest': {
                'Key': self.table._encode_keys(delete),
            }
        })

    response = self.table.connection.batch_write_item(batch_data)
    self._to_put = []
    self._to_delete = []

    # Handle unprocessed items: requeue them for the next flush.
    unprocessed = response.get('UnprocessedItems', None)

    if unprocessed:
        for request in unprocessed.get(table_name, []):
            # A fresh ``Item`` per request (the old code reused one
            # instance across the loop while appending its ``_data``,
            # risking every requeued entry sharing the same dict).
            # ``Item`` takes the ``Table``, not this batch object —
            # consistent with ``Item(self.table, data=put)`` above.
            item = Item(self.table)

            # ``dict.has_key()`` is Python-2-only; ``in`` works everywhere.
            if 'PutRequest' in request:
                item.load({
                    'Item': request['PutRequest']['Item'],
                })
                self._to_put.append(item._data)
            elif 'DeleteRequest' in request:
                item.load({
                    'Item': request['DeleteRequest']['Key'],
                })
                self._to_delete.append(item._data)
            else:
                raise Exception(
                    "Unhandled unprocessed request type: %s" % request
                )

    return True