async def get_pure_recipe_list(conn, limit, offset, where_list, usr, many, favored):
    """Select recipes joined with their source and category.

    Each row additionally carries:
      * ``likes`` -- count of value=True votes for the recipe
      * ``liked`` -- the user's own value=True vote (only when ``usr`` given)

    Args:
        conn: database connection used to execute the query.
        limit, offset: pagination, applied only when ``many`` is truthy.
        where_list: iterable of extra WHERE clauses to AND onto the query.
        usr: mapping with the requesting user's ``id``, or falsy for anonymous.
        many: when falsy, fetch exactly one record or raise.
        favored: when truthy, restrict to recipes the user liked
            (inner joins instead of outer joins on the vote table).

    Returns:
        list of dicts, one per recipe row (a single-element list when
        ``many`` is falsy).

    Raises:
        RecordNotFound: when ``many`` is falsy and no row matched.
    """
    # Inner joins (favored) keep only liked recipes; outer joins keep all.
    # NOTE: original read `False if favored else True` -- same value, simpler form.
    fetch_all_recipes = not favored
    if usr:
        alias1 = vote.alias('alias1')
        alias2 = vote.alias('alias2')
        query = sa.select(
            [recipe, source, category,
             alias2.c.value.label('liked'),
             sa.func.count(alias1.c.recipe_id).label('likes')],
            use_labels=True,
        ).select_from(
            recipe.join(source, source.c.id == recipe.c.source_id)
            .join(category, category.c.id == recipe.c.category_id)
            .join(
                alias1,
                sa.and_(alias1.c.recipe_id == recipe.c.id,
                        alias1.c.value == True),  # noqa: E712 -- SQL expression
                isouter=fetch_all_recipes)
            .join(
                alias2,
                sa.and_(alias2.c.recipe_id == recipe.c.id,
                        alias2.c.value == True,  # noqa: E712
                        alias2.c.user_id == usr['id']),
                isouter=fetch_all_recipes)
        ).group_by(recipe.c.id, source.c.id, category.c.id, alias2.c.value)
    else:
        # Anonymous: no per-user `liked` column, just the global like count.
        query = sa.select(
            [recipe, source, category,
             sa.func.count(vote.c.recipe_id).label('likes')],
            use_labels=True,
        ).select_from(
            recipe.join(source, source.c.id == recipe.c.source_id)
            .join(category, category.c.id == recipe.c.category_id)
            .join(vote,
                  sa.and_(vote.c.recipe_id == recipe.c.id,
                          vote.c.value == True),  # noqa: E712
                  isouter=True)
        ).group_by(recipe.c.id, source.c.id, category.c.id)
    for where in where_list:
        query = query.where(where)
    if not many:
        cursor = await conn.execute(query)
        recipe_record = await cursor.fetchone()
        if not recipe_record:
            raise RecordNotFound('No recipe with such id')
        return [dict(recipe_record)]
    query = query.limit(limit).offset(offset)
    cursor = await conn.execute(query)
    recipe_records = await cursor.fetchall()
    return [dict(q) for q in recipe_records]
async def update_account(request, account, role_ids):
    # language=rst
    """
    Raises:
        PreconditionFailed: if the account doesn't exist or isn't in the
            expected state.
    """
    current_state = await account.data()
    roles_table = metadata().tables['AccountRoles']
    async with request.app['engine'].acquire() as connection:
        async with connection.begin():
            # Write the audit-log entry first; its id doubles as the
            # optimistic-concurrency token on the account row.
            log_id = await _accountroleslog_insert(
                connection,
                created_by='*****@*****.**',
                request_info=str(request.headers),
                account_id=account['account'],
                action='U',
                role_ids=role_ids)
            guard = sa.and_(
                roles_table.c.account_id == account['account'],
                roles_table.c.log_id == current_state['log_id'])
            statement = roles_table.update().where(guard).values(
                role_ids=role_ids, log_id=log_id)
            outcome = await connection.execute(statement)
            # Exactly one row must match; anything else means the row was
            # changed (or removed) since the caller read it.
            if outcome.rowcount != 1:
                raise PreconditionFailed()
    return log_id
async def get_recipe_list_count(conn, where_list, favored=False, usr=None):
    """Return how many recipes match ``where_list``.

    With ``favored`` set, only recipes that the user (``usr['id']``) voted
    value=True for are counted.
    """
    if favored:
        liked_by_user = sa.and_(
            vote.c.recipe_id == recipe.c.id,
            vote.c.value == True,  # noqa: E712 -- SQL expression, not identity
            vote.c.user_id == usr['id'])
        count_source = recipe.join(vote, liked_by_user)
    else:
        count_source = recipe
    query = sa.select([sa.func.count()]).select_from(count_source)
    for clause in where_list:
        query = query.where(clause)
    result = await conn.execute(query)
    row = await result.fetchone()
    return row[0]
async def get_file(self, resource_path, row_id, subpath):
    """Fetch one row's ``data_<subpath>`` document together with its blob.

    Raises:
        ResourceNotFound: when no row with ``row_id`` exists.
    """
    rtype = self._get_resource_type(resource_path)
    main = self._get_table(resource_path)
    files = self.files_tables[rtype]
    data_col = 'data_' + subpath
    # The files table shares the main table's primary key, one row per subpath.
    join_condition = sa.and_(
        files.c.id == main.c.id,
        files.c.subpath == subpath,
    )
    query = (
        sa.select([main.c.revision, main.c[data_col], files.c.blob])
        .select_from(main.join(files, join_condition))
        .where(main.c.id == row_id)
    )
    async with self.pool.acquire() as conn:
        result = await conn.execute(query)
        row = await result.first()
    if not row:
        raise ResourceNotFound("Resource %s not found." % row_id)
    return dict(row[data_col], revision=row.revision, blob=row.blob)
async def search(self, resource_path, search_path):
    """Run a '/'-separated query string against a resource table.

    ``search_path`` is a sequence of operators and their URL-quoted
    arguments, e.g. ``exact/<key>/<value>/sort/<key>/limit/10``.

    Returns:
        A list of dicts: full documents for ``show_all``, the selected
        fields (plus ``id``) for ``show``, otherwise just ``{'id': ...}``.

    Raises:
        Exception: on an unknown operator or too few operator arguments.
    """
    # Arity of every supported operator.
    operator_args = {
        'contains': 2,
        'exact': 2,
        'ge': 2,
        'gt': 2,
        'le': 2,
        'lt': 2,
        'ne': 2,
        'startswith': 2,
        'show': 1,
        'show_all': 0,
        'sort': 1,
        'offset': 1,
        'limit': 1,
    }

    # --- Parse the path into (operator, args) pairs. ---------------------
    operators = []
    words = map(urllib.parse.unquote, search_path.split('/'))
    operator = next(words, None)
    while operator:
        if operator not in operator_args:
            raise Exception("Unknown operator %r." % operator)
        args_count = operator_args[operator]
        try:
            args = [next(words) for i in range(args_count)]
        except StopIteration:
            raise Exception("Operator %r requires at least %d arguments."
                            % (operator, args_count))
        operators.append((operator, args))
        operator = next(words, None)

    # --- Translate operators into query pieces. --------------------------
    sort_keys = []
    show_all = False
    show = []
    offset = None
    limit = None
    where = []
    gin = []
    joins = []
    table = self._get_table(resource_path)
    resource_type = self._get_resource_type(resource_path)
    aux_table = self.aux_tables[resource_type]
    schema = self._get_prototype_schema(resource_type)

    def joined_column(key):
        # Each data-key filter needs its own alias of the aux table so
        # multiple filters don't collide on one join.
        alias = aux_table.alias('t' + str(len(joins) + 1))
        joins.append(table.join(alias, table.c.id == alias.c.id))
        return alias.c.data[key]

    # Comparison operators all share the same alias/join/where shape;
    # dispatch instead of five copy-pasted branches.
    comparators = {
        'ge': lambda col, val: col >= val,
        'gt': lambda col, val: col > val,
        'le': lambda col, val: col <= val,
        'lt': lambda col, val: col < val,
        'ne': lambda col, val: col != val,
    }

    for operator, args in operators:
        if operator == 'show_all':
            show_all = True
        elif operator == 'show':
            show.extend(args)
        elif operator == 'sort':
            sort_keys.extend(args)
        elif operator == 'offset':
            offset = int(args[0])
        elif operator == 'limit':
            limit = int(args[0])
        elif operator == 'exact':
            key, value = args
            value = schema[key].search(value, cast=False)
            # Exact matches go through the GIN-indexed `search` column.
            gin.append({key: value})
        elif operator == 'startswith':
            key, value = args
            value = schema[key].search(value, cast=False)
            where.append(joined_column(key).astext.startswith(value))
        elif operator == 'contains':
            key, value = args
            value = schema[key].search(value, cast=False)
            where.append(joined_column(key).astext.contains(value))
        elif operator in comparators:
            key, value = args
            value = schema[key].search(value)
            where.append(comparators[operator](joined_column(key), value))
        else:
            # Defensive: unreachable while operator_args and the branches
            # above stay in sync.
            raise Exception("Operator %r is not yet implemented."
                            % operator)

    # --- Build the SELECT. -----------------------------------------------
    if show_all is False and len(show) == 0:
        query = sa.select([table.c.id], distinct=table.c.id)
    else:
        query = sa.select([table.c.id, table.c.revision, table.c.data],
                          distinct=table.c.id)
    for join in joins:
        query = query.select_from(join)
    if gin:
        where.append(table.c.search.contains(gin))
    if where:
        query = query.where(sa.and_(*where))
    if sort_keys:
        db_sort_keys = []
        for sort_key in sort_keys:
            if sort_key == 'id':
                db_sort_keys.append(table.c.id)
            else:
                db_sort_keys.append(table.c.data[sort_key])
        query = query.order_by(*db_sort_keys)
    if limit:
        query = query.limit(limit)
    if offset:
        query = query.offset(offset)

    # --- Execute and shape the result. -----------------------------------
    async with self.pool.acquire() as conn:
        # BUG FIX: `conn.execute` is a coroutine and was not awaited here
        # (every other call site in this module awaits it); `async for`
        # over the bare coroutine would fail at runtime.
        result = await conn.execute(query)
        if show_all:
            return [
                dict(row.data, id=row.id, revision=row.revision)
                async for row in result
            ]
        elif show:
            return [
                dict(
                    {
                        field: row.data[field]
                        for field in show if field in row.data
                    },
                    id=row.id)
                async for row in result
            ]
        else:
            return [{'id': row.id} async for row in result]