def by_ship_type(cls, ship_type_id):
    """Yield LossMailAttributes entities matching the given ship type.

    Pages through results 50 at a time, re-issuing the GQL query with
    the batch's end cursor bound to @startCursor until the datastore
    reports no more results (or returns an empty batch).
    """
    _log.debug('LossMailAttributes.by_ship_type(%d)', ship_type_id)
    page_cursor = None
    while True:
        request = googledatastore.RunQueryRequest()
        gql = request.gql_query
        gql.query_string = ('SELECT * FROM LossMailAttributes '
                            'WHERE ship_type_id = @shipTypeId '
                            'LIMIT 50 OFFSET @startCursor ')
        gql.allow_literal = True
        ship_arg = gql.name_arg.add()
        ship_arg.name = 'shipTypeId'
        ship_arg.value.integer_value = ship_type_id
        start_arg = gql.name_arg.add()
        start_arg.name = 'startCursor'
        if page_cursor is None:
            # First page: start from offset 0.
            start_arg.value.integer_value = 0
        else:
            # Subsequent pages: resume from the previous batch's cursor.
            start_arg.cursor = page_cursor
        response = googledatastore.run_query(request)
        batch = response.batch
        for entity_result in batch.entity_result:
            wrapper = cls()
            wrapper._set_entity(entity_result.entity)
            yield wrapper
        if batch.more_results == googledatastore.QueryResultBatch.NO_MORE_RESULTS:
            break
        if not batch.entity_result:
            break
        page_cursor = batch.end_cursor
def unverified_payments(cls):
    """Yield Payment entities that are paid but not yet API-verified.

    Pages through results 50 at a time using the batch end cursor
    bound to @startCursor, stopping when the datastore reports no
    more results or an empty batch comes back.
    """
    _log.debug('Payment.unverified_payments()')
    page_cursor = None
    while True:
        request = googledatastore.RunQueryRequest()
        gql = request.gql_query
        gql.query_string = ('SELECT * FROM Payment '
                            'WHERE paid = TRUE '
                            'AND api_verified = FALSE '
                            'LIMIT 50 OFFSET @startCursor ')
        gql.allow_literal = True
        start_arg = gql.name_arg.add()
        start_arg.name = 'startCursor'
        if page_cursor is None:
            start_arg.value.integer_value = 0
        else:
            start_arg.cursor = page_cursor
        response = googledatastore.run_query(request)
        batch = response.batch
        for entity_result in batch.entity_result:
            payment = cls()
            payment._set_entity(entity_result.entity)
            yield payment
        if batch.more_results == googledatastore.QueryResultBatch.NO_MORE_RESULTS:
            break
        if not batch.entity_result:
            break
        page_cursor = batch.end_cursor
def wipeout(cls):
    """ Deletes all entities of this type from the datastore. """
    _log.debug('%s.wipeout()', cls.__name__)
    batch_size = 500
    page_cursor = None
    while True:
        query_req = googledatastore.RunQueryRequest()
        gql = query_req.gql_query
        gql.query_string = (
            'SELECT __key__ FROM %s LIMIT %d OFFSET @startCursor ' %
            (cls.__name__, batch_size))
        gql.allow_literal = True
        start_arg = gql.name_arg.add()
        start_arg.name = 'startCursor'
        if page_cursor is None:
            start_arg.value.integer_value = 0
        else:
            start_arg.cursor = page_cursor
        response = googledatastore.run_query(query_req)
        batch = response.batch
        # Delete this whole batch of keys in one non-transactional commit.
        commit_req = googledatastore.CommitRequest()
        commit_req.mode = googledatastore.CommitRequest.NON_TRANSACTIONAL
        commit_req.mutation.delete.extend(
            [entity_result.entity.key for entity_result in batch.entity_result])
        googledatastore.commit(commit_req)
        if batch.more_results == googledatastore.QueryResultBatch.NO_MORE_RESULTS:
            break
        if not batch.entity_result:
            break
        page_cursor = batch.end_cursor
def all_after(cls, kill_id):
    """Query for KillMail entities newer than a given point.

    Args:
        kill_id: either an integer kill id, in which case the query
            filters and orders on kill_id; or a date/datetime-like
            value accepted by _date_to_timestamp, in which case the
            query filters and orders on kill_time.

    Returns:
        The result of cls.query(req) — an iterable of KillMail
        entities, paged via @startCursor by the query helper.
    """
    # %s, not %d: kill_id may be a date on the non-int path, and
    # logging formats arguments lazily — %d would fail at log time.
    _log.debug('KillMail.all_after(%s)', kill_id)
    req = googledatastore.RunQueryRequest()
    query = req.gql_query
    # isinstance() is the idiomatic type test (was: type(kill_id) is int).
    if isinstance(kill_id, int):
        query.query_string = ('SELECT * FROM KillMail '
                              'WHERE kill_id > @killId '
                              'ORDER BY kill_id '
                              'LIMIT 50 OFFSET @startCursor ')
        query.allow_literal = True
        kill_id_arg = query.name_arg.add()
        kill_id_arg.name = 'killId'
        kill_id_arg.value.integer_value = kill_id
    else:
        query.query_string = ('SELECT * FROM KillMail '
                              'WHERE kill_time > @killTime '
                              'ORDER BY kill_time '
                              'LIMIT 50 OFFSET @startCursor ')
        query.allow_literal = True
        # Not an id: treat the argument as a date/datetime cutoff.
        ts = _date_to_timestamp(kill_id)
        kill_time_arg = query.name_arg.add()
        kill_time_arg.name = 'killTime'
        kill_time_arg.value.timestamp_microseconds_value = ts
    return cls.query(req)
def archive(cls):
    """Delete all Todo items that are done."""
    # Open a transaction so the query and the delete commit atomically.
    tx_req = datastore.BeginTransactionRequest()
    tx = datastore.begin_transaction(tx_req).transaction
    query_req = datastore.RunQueryRequest()
    query_req.read_options.transaction = tx
    query = query_req.query
    set_kind(query, kind='Todo')
    # Keys-only projection: we only need the keys to delete.
    add_projection(query, '__key__')
    done_filter = set_property_filter(
        datastore.Filter(), 'done', datastore.PropertyFilter.EQUAL, True)
    ancestor_filter = set_property_filter(
        datastore.Filter(), '__key__',
        datastore.PropertyFilter.HAS_ANCESTOR, default_todo_list.key)
    set_composite_filter(query.filter, datastore.CompositeFilter.AND,
                         done_filter, ancestor_filter)
    query_resp = datastore.run_query(query_req)
    done_keys = [result.entity.key for result in query_resp.batch.entity_result]
    commit_req = datastore.CommitRequest()
    commit_req.transaction = tx
    commit_req.mutation.delete.extend(done_keys)
    datastore.commit(commit_req)
    return ''
def all(cls):
    """ Loads all entities of this type from the datastore. """
    _log.debug('%s.all()', cls.__name__)
    request = googledatastore.RunQueryRequest()
    gql = request.gql_query
    gql.query_string = (
        'SELECT * FROM %s LIMIT 50 OFFSET @startCursor ' % cls.__name__)
    gql.allow_literal = True
    # cls.query handles paging by binding @startCursor.
    return cls.query(request)
def get_all(cls):
    """Query for all Todo items ordered by creation date."""
    request = datastore.RunQueryRequest()
    query = request.query
    set_kind(query, kind='Todo')
    # Ancestor filter restricts results to the default todo list.
    set_property_filter(query.filter, '__key__',
                        datastore.PropertyFilter.HAS_ANCESTOR,
                        default_todo_list.key)
    response = datastore.run_query(request)
    return [Todo.from_proto(result.entity)
            for result in response.batch.entity_result]
def get_by_name(name):
    """Look up a Tower entity by its pos_name.

    Returns the first matching Tower, or None if no entity matches.
    """
    request = googledatastore.RunQueryRequest()
    request.query.kind.add().name = 'Tower'
    pos_filter = request.query.filter.property_filter
    pos_filter.property.name = 'pos_name'
    pos_filter.operator = googledatastore.PropertyFilter.EQUAL
    pos_filter.value.string_value = name
    response = googledatastore.run_query(request)
    results = response.batch.entity_result
    if not results:
        return None
    tower = Tower()
    tower._set_entity(results[0].entity)
    return tower
def get_all(cls):
    """Query for all Todo items ordered by creation date.

    This method is eventually consistent to avoid the need for an
    extra index.
    """
    request = datastore.RunQueryRequest()
    query = request.query
    set_kind(query, kind='Todo')
    add_property_orders(query, 'created')
    response = datastore.run_query(request)
    return [Todo.from_proto(result.entity)
            for result in response.batch.entity_results]
def testRunQuery(self):
    """run_query POSTs the serialized request and parses the response."""
    query_request = datastore.RunQueryRequest()
    query_request.query.kind.add().name = 'Foo'
    body = query_request.SerializeToString()
    expected_response = datastore.RunQueryResponse()
    self.expectRequest(
        'https://example.com/datastore/v1/projects/foo:runQuery',
        method='POST', body=body,
        headers=self.makeExpectedHeaders(body)).AndReturn(
            (TestResponse(status=200, reason='Found'),
             expected_response.SerializeToString()))
    self.mox.ReplayAll()
    actual = self.conn.run_query(query_request)
    self.assertEqual(expected_response, actual)
    self.mox.VerifyAll()
def testRunQuery(self):
    """run_query hits the v1beta1 endpoint with protobuf headers."""
    query_request = datastore.RunQueryRequest()
    query_request.query.kind.add().name = 'Foo'
    body = query_request.SerializeToString()
    expected_response = datastore.RunQueryResponse()
    self.expectRequest(
        'https://datastore.com/datastore/v1beta1/datasets/foo/runQuery',
        method='POST', body=body,
        headers={'Content-Type': 'application/x-protobuf',
                 'Content-Length': str(len(body))}).AndReturn(
            (TestResponse(status=200, reason='Found'),
             expected_response.SerializeToString()))
    self.mox.ReplayAll()
    actual = self.conn.run_query(query_request)
    self.assertEqual(expected_response, actual)
    self.mox.VerifyAll()
def losses_after(cls, date):
    """Query for loss-mail KillMails whose kill_time is after `date`."""
    _log.debug('KillMail.losses_after(%s)', date)
    micros = _date_to_timestamp(date)
    request = googledatastore.RunQueryRequest()
    gql = request.gql_query
    gql.query_string = ('SELECT * FROM KillMail '
                        'WHERE loss_mail = TRUE '
                        'AND kill_time > @date '
                        'ORDER BY kill_time, kill_id '
                        'LIMIT 50 OFFSET @startCursor ')
    gql.allow_literal = True
    date_arg = gql.name_arg.add()
    date_arg.name = 'date'
    date_arg.value.timestamp_microseconds_value = micros
    # cls.query handles paging by binding @startCursor.
    return cls.query(request)
def cbquery(self, master):
    """Return [[camid, name], ...] for all PSCams entities owned by master.

    Args:
        master: string matched against the 'master' property.

    Returns:
        A list of [camid, camname] pairs, one per matching entity.
        An entity missing a 'camid' or 'name' property contributes
        None for that slot.
    """
    req = datastore.RunQueryRequest()
    query = req.query
    query.kind.add().name = 'PSCams'
    master_filter = query.filter.property_filter
    master_filter.property.name = 'master'
    master_filter.operator = datastore.PropertyFilter.EQUAL
    master_filter.value.string_value = master
    resp = datastore.run_query(req)
    mycams = []
    for entity_result in resp.batch.entity_result:
        # Reset per entity: previously a missing property raised
        # NameError (first entity) or silently reused the previous
        # entity's value.
        camid = None
        camname = None
        for prop in entity_result.entity.property:
            if prop.name == 'camid':
                camid = prop.value.string_value
            elif prop.name == 'name':
                camname = prop.value.string_value
        mycams.append([camid, camname])
    return mycams
def testRunQuery(self):
    """run_query POSTs the request and decodes the protobuf response."""
    query_request = datastore.RunQueryRequest()
    query_request.query.kind.add().name = 'Foo'
    body = query_request.SerializeToString()
    expected_proto = datastore.RunQueryResponse()
    http_response = httplib2.Response({
        'status': 200,
        'content-type': 'application/x-protobuf',
    })
    self.expectRequest(
        'https://example.com/datastore/v1/projects/foo:runQuery',
        method='POST', body=body,
        headers=self.makeExpectedHeaders(body)).AndReturn(
            (http_response, expected_proto.SerializeToString()))
    self.mox.ReplayAll()
    actual = self.conn.run_query(query_request)
    self.assertEqual(expected_proto, actual)
    self.mox.VerifyAll()
def read_by_indexes(table_name, index_name_values=None):
    """Index reader.

    Yields extracted entities of kind `table_name`, optionally
    filtered by (name, value) equality pairs, following the batch
    end cursor until the datastore reports the query is finished.
    """
    request = datastore.RunQueryRequest()
    query = request.query
    query.kind.add().name = table_name
    for prop_name, prop_val in (index_name_values or []):
        prop_filter = query.filter.property_filter
        prop_filter.property.name = prop_name
        prop_filter.operator = datastore.PropertyFilter.EQUAL
        prop_filter.value.string_value = str(prop_val)
    empty_batches = 0
    while True:
        response = datastore.run_query(request)
        batch = response.batch
        yielded_any = False
        for entity_result in batch.entity_result:
            yield extract_entity(entity_result)
            yielded_any = True
        if not yielded_any:
            # Guard against bugs or excessive looping — as long as we
            # can keep yielding records we'll continue to execute.
            empty_batches += 1
            if empty_batches > 5:
                raise ValueError("Exceeded the excessive query threshold")
        if batch.more_results != datastore.QueryResultBatch.NOT_FINISHED:
            break
        query.start_cursor.CopyFrom(batch.end_cursor)
def related_kills(self, back_minutes=60, forward_minutes=15, system_ids=None):
    """Yield KillMails near this kill in time and (optionally) space.

    Args:
        back_minutes: minutes before self.kill_time to include.
        forward_minutes: minutes after self.kill_time to include.
        system_ids: solar system ids to search; defaults to this
            kill's own solar system.

    Yields:
        KillMail entities in the time window, per system, ordered by
        kill_time then kill_id.
    """
    # Plain int literal: the old `1000000L` long suffix is a syntax
    # error on Python 3 and unnecessary on Python 2 (same value).
    micros_per_minute = 60 * 1000000
    center = _date_to_timestamp(self.kill_time)
    window_start = center - back_minutes * micros_per_minute
    window_end = center + forward_minutes * micros_per_minute
    if system_ids is None:
        system_ids = [self.solar_system_id]
    for system_id in system_ids:
        req = googledatastore.RunQueryRequest()
        gql = req.gql_query
        gql.query_string = ('SELECT * FROM KillMail '
                            'WHERE solar_system_id = @system '
                            'AND kill_time >= @startTime '
                            'AND kill_time < @endTime '
                            'ORDER BY kill_time, kill_id ')
        system_arg = gql.name_arg.add()
        system_arg.name = 'system'
        system_arg.value.integer_value = system_id
        start_arg = gql.name_arg.add()
        start_arg.name = 'startTime'
        start_arg.value.timestamp_microseconds_value = window_start
        end_arg = gql.name_arg.add()
        end_arg.name = 'endTime'
        end_arg.value.timestamp_microseconds_value = window_end
        resp = googledatastore.run_query(req)
        for result in resp.batch.entity_result:
            mail = KillMail()
            mail._set_entity(result.entity)
            yield mail