def reduceProcess(data_id, entities):
  """Reduce phase: persist the collected list items into the cached list.

  Args:
    data_id: Identifier of the cached list to update (presumably produced by
      lists.getDataId in the map phase — confirm against the mapper).
    entities: Iterable of JSON-encoded list item dicts emitted by the mapper.
  """
  # TODO: (Aruna) Fix these import
  from melange.logic import cached_list
  from melange.utils import lists

  mapper_params = context.get().mapreduce_spec.mapper.params
  cached_list_id = mapper_params['list_id']

  def updateCache():
    # valid_period is looked up here, inside the transaction, so the value
    # read is consistent with the cache write.
    valid_period = lists.getList(cached_list_id).valid_period
    return cached_list.setCacheItems(
        data_id, map(json.loads, entities), valid_period)

  ndb.transaction(updateCache)
def reduceProcess(data_id, entities):
  """Reduce phase: store the mapped list items in the cached list.

  NOTE(review): this function is defined twice in this file with only
  formatting differences; this second definition shadows the first.
  The duplication should be resolved.

  Args:
    data_id: Identifier of the cached list to update (presumably produced by
      lists.getDataId in the map phase — confirm against the mapper).
    entities: Iterable of JSON-encoded list item dicts emitted by the mapper.
  """
  # TODO: (Aruna) Fix these import
  from melange.logic import cached_list
  from melange.utils import lists

  # The list id is passed through the mapreduce mapper params.
  ctx = context.get()
  params = ctx.mapreduce_spec.mapper.params
  list_id = params['list_id']

  # The cache write runs in an ndb transaction; valid_period is read inside
  # the lambda, i.e. inside the transaction.
  ndb.transaction(
      lambda: cached_list.setCacheItems(data_id, map(json.loads, entities),
                                        lists.getList(list_id).valid_period))
def mapProcess(entity):
  """Map phase: emit a (data_id, json_item) pair for a matching entity.

  The entity is emitted only when it is actually returned by the list's
  query, serialized as a JSON list-item dict.

  Args:
    entity: A datastore entity handed to the mapper by the mapreduce runner.

  Yields:
    (data_id, item) where item is the JSON-encoded list item dict.
  """
  # TODO: (Aruna) Fix this import
  from melange.utils import lists

  mapper_params = context.get().mapreduce_spec.mapper.params

  # Column accessors for the configured list.
  columns = lists.getList(mapper_params['list_id']).columns
  column_funcs = [(column.col_id, column.getValue) for column in columns]

  # The data id must be derived from the query BEFORE filtering, since
  # query.filter mutates the query in place.
  query = pickle.loads(mapper_params['query_pickle'])
  data_id = lists.getDataId(query)

  if query.filter('__key__', entity.key()).get():
    yield (data_id, json.dumps(lists.toListItemDict(entity, column_funcs)))
def mapProcess(entity):
  """Map phase: emit a (data_id, json_item) pair for a matching entity.

  NOTE(review): this function is defined twice in this file with only
  formatting differences; this second definition shadows the first.
  The duplication should be resolved.

  Args:
    entity: A datastore entity handed to the mapper by the mapreduce runner.

  Yields:
    (data_id, item) where item is the JSON-encoded list item dict, emitted
    only when the entity is matched by the list's query.
  """
  # TODO: (Aruna) Fix this import
  from melange.utils import lists

  ctx = context.get()
  params = ctx.mapreduce_spec.mapper.params
  list_id = params['list_id']

  # (col_id, getter) pairs used to serialize the entity's columns.
  col_funcs = [(c.col_id, c.getValue) for c in lists.getList(list_id).columns]

  # The query is shipped through mapper params as a pickle. The data id is
  # derived from the query before filtering — query.filter mutates the
  # query in place.
  query_pickle = params['query_pickle']
  query = pickle.loads(query_pickle)
  data_id = lists.getDataId(query)

  if (query.filter('__key__', entity.key()).get()):
    item = json.dumps(lists.toListItemDict(entity, col_funcs))
    yield (data_id, item)