def produktionsmittel():
    """Render the production-means overview for the logged-in Betrieb.

    Lists every purchase of the current company together with how much of
    each item has already been used up, split into an "active" table
    (less than 100 % used) and an "inactive" table (exactly 100 % used).
    """
    # Base query: one row per purchase, with the price formatted as
    # "<x> Std." and the summed usage percentage over all Produktionsmittel
    # rows belonging to that purchase formatted as "<y> %".
    produktionsmittel_qry = db.session.query(
        Kaeufe.id, Angebote.name, Angebote.beschreibung,
        func.concat(func.round(Angebote.preis, 2), " Std.").label("preis"),
        func.concat(func.round(func.coalesce(func.sum(Produktionsmittel.prozent_gebraucht), 0), 2), " %").
        label("prozent_gebraucht"))\
        .select_from(Kaeufe)\
        .filter(Kaeufe.betrieb==current_user.id).outerjoin(Produktionsmittel,
            Kaeufe.id==Produktionsmittel.kauf).join(Angebote, Kaeufe.angebot==Angebote.id).\
        group_by(Kaeufe, Angebote, Produktionsmittel.kauf)
    # Active: still usable (< 100 % consumed).
    produktionsmittel_aktiv = produktionsmittel_qry.having(
        func.coalesce(func.sum(Produktionsmittel.prozent_gebraucht).
                      label("prozent_gebraucht"), 0).label("prozent_gebraucht")<100).all()
    # Inactive: fully consumed (== 100 %).
    produktionsmittel_inaktiv = produktionsmittel_qry.having(
        func.coalesce(func.sum(Produktionsmittel.prozent_gebraucht).
                      label("prozent_gebraucht"), 0).label("prozent_gebraucht")== 100).all()
    table_aktiv = ProduktionsmittelTable(
        produktionsmittel_aktiv, no_items="(Keine Produktionsmittel vorhanden.)")
    table_inaktiv = ProduktionsmittelTable(
        produktionsmittel_inaktiv, no_items="(Noch keine Produktionsmittel verbraucht.)")
    return render_template('produktionsmittel.html', table_aktiv=table_aktiv,
                           table_inaktiv=table_inaktiv)
def compra_mensual():
    """Build a reusable SELECT, aliased as "compra_mensual", that totals the
    quantity purchased of each article per year/month period."""
    compras = Compra.mapper.mapped_table
    articulos_compra = ArticuloCompra.mapper.mapped_table
    articulos = Articulo.mapper.mapped_table
    marcas = Marca.mapper.mapped_table
    # "YYYY-M" period label derived from the purchase date.
    periodo = func.concat(func.year(compras.c.fecha), "-",
                          func.month(compras.c.fecha)).label("periodo")
    # Human-readable article description: name, brand, quantity and unit.
    articulo = func.concat(articulos.c.descripcion, " ",
                           marcas.c.denominacion, " ",
                           articulos.c.cantidad, " ",
                           articulos.c.unidad_medida).label("articulo")
    cantidad = func.sum(articulos_compra.c.cantidad).label("cantidad")
    stmt = select(
        [periodo, articulo, cantidad],
        from_obj=compras.join(articulos_compra).join(articulos).join(marcas),
        group_by=[
            func.year(compras.c.fecha),
            func.month(compras.c.fecha),
            articulos_compra.c.articulo_id,
        ],
    )
    return stmt.alias("compra_mensual")
def lista_de_precios():
    """Build a SELECT, aliased as "lista_de_precios", listing the current
    price of every article at every establishment, ordered by article name
    and then by price."""
    articulos = Articulo.mapper.mapped_table
    establecimientos = Establecimiento.mapper.mapped_table
    marcas = Marca.mapper.mapped_table
    precios = precio_actualizado()
    # "name brand quantity unit" description of the article.
    articulo = func.concat(articulos.c.descripcion, " ",
                           marcas.c.denominacion, " ",
                           articulos.c.cantidad, " ",
                           articulos.c.unidad_medida).label("articulo")
    # "name address" description of the establishment.
    establecimiento = func.concat(establecimientos.c.denominacion, " ",
                                  establecimientos.c.domicilio).label("establecimiento")
    stmt = select(
        [articulo, establecimiento, precios.c.fecha, precios.c.precio],
        from_obj=precios.join(articulos).join(marcas).join(establecimientos),
        order_by=[
            func.concat(articulos.c.descripcion, " ", marcas.c.denominacion),
            precios.c.precio,
        ],
    )
    return stmt.alias("lista_de_precios")
async def get_products_to_reorder():
    """Return report about products to reorder.

    A product needs reordering when it is not discontinued and its available
    stock (units in stock minus units already on order) does not exceed its
    reorder level. Each row carries the product, its category, stock counts
    and a formatted supplier contact string.
    """
    # Stock still usable once outstanding orders are accounted for.
    available = products.c.units_in_stock - products.c.units_on_order
    # "<title>': '<name>' via '<phone>" — the text() fragments are raw SQL
    # string literals, quotes included.
    contact = func.concat(suppliers.c.contact_title, text("': '"),
                          suppliers.c.contact_name, text("' via '"),
                          suppliers.c.phone)
    to_reorder = available - products.c.reorder_level
    query = select(
        [
            products.c.product_id,
            products.c.product_name,
            categories.c.category_name,
            products.c.units_in_stock,
            products.c.units_on_order,
            available.label('units_available'),
            products.c.reorder_level,
            suppliers.c.company_name.label('supplier'),
            contact.label('contact')
        ]
    ).select_from(
        join(join(products, suppliers,
                  products.c.supplier_id == suppliers.c.supplier_id),
             categories,
             products.c.category_id == categories.c.category_id)
    ).where(
        and_(
            products.c.discontinued == 0,
            to_reorder <= 0
        )
    )
    # Fix: removed the stray debug `print(query)` left over from development.
    return await database.fetch_all(query=query)
def analytics_compability():
    """Compatibility report: for every combination of the selected users,
    compute the artists (or "artist – track" strings) they have in common.

    NOTE(review): this is Python 2 code — tuple-unpacking lambdas and lazy
    map/filter semantics; it will not parse under Python 3.
    """
    # user_id -> that user's artists or tracks having more than
    # `more_than_x_scrobbles` scrobbles (criterion chosen by request arg).
    user2set = dict([
        (user_id, map(operator.itemgetter(0), {
            "artist" : db.session.query(Scrobble.artist).\
                group_by(Scrobble.artist),
            "track" : db.session.query(func.concat(Scrobble.artist, literal_column('" – "'), Scrobble.track)).\
                group_by(Scrobble.artist, Scrobble.track),
        }
        [request.args.get("criterion")].\
            filter_by(user_id=user_id).\
            having(func.count(Scrobble.id) > int(request.args.get("more_than_x_scrobbles"))).\
            all()))
        for user_id in map(int, request.args.getlist("users"))
    ])
    user2username = dict(db.session.query(User.id, User.username).all())
    # For each group size, keep the ten largest non-empty intersections of
    # the members' sets, labelled with the sorted member usernames.
    # NOTE(review): the lambda parameter shadows the builtin `set`.
    length2groups = [
        (length, filter(lambda (users, set): len(set) > 0, sorted([
            (
                ", ".join(sorted([user2username[i] for i in user2username if i in group], key=lambda username: username.lower())),
                reduce(set.intersection, map(set, [user2set[user_id] for user_id in group]))
            )
            for group in itertools.combinations(map(int, request.args.getlist("users")), length)
            if len(group) == length
        ], key=lambda (users, set): -len(set)))[:10])
        for length in range(2, len(user2username) + 1)
    ]
    # NOTE(review): no return statement is visible at this point — the
    # function may be truncated in this view; confirm against the full file.
def customersearch(db):
    """Search primary dependents by name and return matches as JSON."""
    authorize()
    # One search token: match it against the combined "first last" name.
    # Two (or more) tokens: first token against the first name, second
    # against the last name.
    tokens = post_get('searchTerm').split(' ')
    if len(tokens) == 1:
        full_name = func.concat(Dependent.firstName, ' ', Dependent.lastName)
        deps = db.query(Dependent).filter(Dependent.isPrimary)\
            .filter(full_name.ilike("%" + tokens[0] + "%"))\
            .order_by(Dependent.lastName, Dependent.firstName)
    else:
        deps = db.query(Dependent).filter(Dependent.isPrimary)\
            .filter(and_(Dependent.firstName.ilike("%" + tokens[0] + "%"),
                         Dependent.lastName.ilike("%" + tokens[1] + "%")))\
            .order_by(Dependent.lastName, Dependent.firstName)
    # Only dependents attached to a family are returned.
    payload = [dep.getDict() for dep in deps if dep.family_id is not None]
    bottle.response.content_type = 'application/json'
    return json.dumps(payload, default=json_util.default)
def upgrade():
    """Backfill empty slugs as "<singular name> <id>" and make slugs unique."""
    for tbl in slugged_tables:
        default_slug = func.concat(
            op.inline_literal(tbl.singular_name + " "),
            tbl.c.id,
        )
        # Only rows whose slug is still empty are backfilled.
        op.execute(
            tbl.update().where(tbl.c.slug == '').values(slug=default_slug))
        # Enforce one slug per row from now on.
        op.create_unique_constraint('uq_' + tbl.name, tbl.name, ['slug'])
def _project_list_endpoint(config: ProjectClassConfig, sel: SelectingStudent, row_formatter, state=None):
    """Serve server-side table data for the live-project list of a
    ProjectClassConfig, optionally restricted by a selecting student's saved
    group and skill filters.
    """
    # check that project is viewable for this ProjectClassConfig instance
    if state is None:
        state = config.selector_lifecycle
    if not verify_open(config, state=state):
        return jsonify({})
    # Visible (non-hidden) live projects, with owner and research group
    # joined in so the table columns below can search/sort on them.
    base_query = config.live_projects \
        .filter(~LiveProject.hidden) \
        .join(Project, Project.id == LiveProject.parent_id) \
        .join(User, User.id == Project.owner_id) \
        .join(ResearchGroup, ResearchGroup.id == LiveProject.group_id)
    if sel is not None:
        # Apply the student's saved research-group filter, if any.
        if sel.group_filters.first():
            base_query = base_query.filter(
                or_(ResearchGroup.id == g.id for g in sel.group_filters))
        # Apply the student's saved skill filter, if any.
        if sel.skill_filters.first():
            base_query = base_query.filter(
                or_(
                    LiveProject.skills.any(SkillGroup.id == s.id)
                    for s in sel.skill_filters))
    # Column specifications for the server-side table handler.
    name = {
        'search': LiveProject.name,
        'order': LiveProject.name,
        'search_collation': 'utf8_general_ci'
    }
    supervisor = {
        'search': func.concat(User.first_name, ' ', User.last_name),
        'order': [User.last_name, User.first_name],
        'search_collation': 'utf8_general_ci'
    }
    group = {
        'search': ResearchGroup.name,
        'order': ResearchGroup.name,
        'search_collation': 'utf8_general_ci'
    }
    meeting = {'order': LiveProject.meeting_reqd}
    columns = {
        'name': name,
        'supervisor': supervisor,
        'group': group,
        'meeting': meeting
    }
    with ServerSideHandler(request, base_query, columns) as handler:
        return handler.build_payload(row_formatter)
def upgrade():
    """Upgrade database schema and/or data, creating a new revision."""
    # Add a mandatory slug column to assessment_templates.
    op.add_column("assessment_templates",
                  sa.Column("slug", sa.String(length=250), nullable=False))
    # Lightweight table construct so the data migration below can reference
    # the columns without importing the ORM model.
    # NOTE(review): 'slug' is declared sa.Integer here although the real
    # column is a String — harmless for this UPDATE, but worth confirming.
    assessment_templates_table = table("assessment_templates",
                                       column('id', sa.Integer),
                                       column('slug', sa.Integer))
    # Backfill every row with "TEMPLATE-<id>".
    op.execute(assessment_templates_table.update().values(slug=func.concat(
        op.inline_literal("TEMPLATE-"),
        assessment_templates_table.c.id,
    ), ))
def update_baer():
    """AJAX endpoint: refresh the sysla/sokn dropdown options after a farm
    (baer) selection changes.

    Expects JSON with keys "sysla", "sokn" and "baer". "baer" is a
    colon-separated string whose fields [1] and [2] are the parish (sokn)
    and county (sysla) names respectively.
    """
    if not set(["sysla", "sokn", "baer"]).issubset(set(request.json.keys())):
        abort(400)
    # Both option lists start with a blank entry.
    syslur = [{"value": "", "text": ""}]
    soknir = [{"value": "", "text": ""}]
    # Sysla
    sysla_query = (db.session.query(
        models.Danarbu.sysla_heiti).distinct().order_by(
            models.Danarbu.sysla_heiti))
    if len(request.json["baer"]) > 0:
        # NOTE(review): the county list is narrowed to the selected farm's
        # parish/county pair — presumably intentional (single-entry list
        # once a farm is chosen); confirm against the front-end behaviour.
        sysla_query = sysla_query.filter(
            models.Danarbu.sokn_heiti == request.json["baer"].split(":")[1],
            models.Danarbu.sysla_heiti == request.json["baer"].split(":")[2],
        )
    # Sokn
    sokn_query = (db.session.query(
        func.concat(models.Danarbu.sokn_heiti, " - ",
                    models.Danarbu.sysla_heiti).label("svikakisa"),
        models.Danarbu.sokn_heiti,
        models.Danarbu.sysla_heiti,
    ).distinct().order_by("svikakisa"))
    if len(request.json["baer"]) > 0:
        sokn_query = sokn_query.filter(
            models.Danarbu.sokn_heiti == request.json["baer"].split(":")[1],
            models.Danarbu.sysla_heiti == request.json["baer"].split(":")[2],
        )
    # Parish options: value is "sokn:sysla", label is "sokn - sysla".
    for (
            svikakisa,
            sokn,
            sysla,
    ) in sokn_query.all():
        soknir.append({"value": ":".join([sokn, sysla]), "text": svikakisa})
    for sysla in sysla_query.all():
        syslur.append({"value": sysla, "text": sysla})
    # Preselect values derived from the farm when one is chosen, otherwise
    # echo back the submitted sysla/sokn.
    sysla_value = (request.json["baer"].split(":")[2]
                   if len(request.json["baer"]) > 0 else request.json["sysla"])
    sokn_value = (":".join(request.json["baer"].split(":")[1:])
                  if len(request.json["baer"]) > 0 else request.json["sokn"])
    return jsonify({
        "syslur": syslur,
        "soknir": soknir,
        "sysla_value": sysla_value,
        "sokn_value": sokn_value,
    })
def system_utilisation_counts_by_group(grouping, systems):
    """Count systems per (group, utilisation state).

    Returns a defaultdict mapping each group value to a dict of the six
    utilisation states, every state initialised to zero.
    """
    states = ['recipe', 'manual', 'idle_automated', 'idle_manual',
              'idle_broken', 'idle_removed']
    counts = defaultdict(lambda: dict.fromkeys(states, 0))
    # A system with no open reservation is "idle_<status>"; otherwise the
    # reservation type ('recipe' or 'manual') is its state.
    state_expr = func.coalesce(
        Reservation.type,
        func.concat('idle_', func.lower(System.status)))
    rows = systems.outerjoin(System.open_reservation)\
        .with_entities(grouping, state_expr, func.count(System.id))\
        .group_by(literal_column("1"), literal_column("2"))
    for group_value, state, count in rows:
        counts[group_value][state] = count
    return counts
def analytics_closer():
    """Week-by-week comparison of two users' listening: for each common
    week, the share of artists/tracks unique to each user."""
    user1 = db.session.query(User).get(int(request.args.get("user1")))
    user2 = db.session.query(User).get(int(request.args.get("user2")))
    # Compare by artist name or by (artist, track) pair.
    if request.args.get("criterion") == "artist":
        field = Scrobble.artist
    if request.args.get("criterion") == "track":
        field = func.concat(Scrobble.artist, Scrobble.track)
    # NOTE(review): max() here compares two Query objects, not their scalar
    # results — under Python 2 this picks an arbitrary one. The value is
    # later used as a scalar subquery in `Scrobble.uts >= start_uts`;
    # confirm whether `.scalar()` was intended.
    start_uts = max(
        db.session.query(func.min(Scrobble.uts)).filter_by(user=user1),
        db.session.query(func.min(Scrobble.uts)).filter_by(user=user2)
    )
    def gather_shares(user):
        # week start (epoch seconds, 7-day buckets) -> set of shares.
        data = {}
        for (share, uts) in db.session.query(field, Scrobble.uts).filter(Scrobble.user == user, Scrobble.uts >= start_uts):
            week = int(math.floor(uts / (86400 * 7)) * (86400 * 7))
            if week not in data:
                data[week] = set()
            if share not in data[week]:
                data[week].add(share)
        return data
    user1_shares = gather_shares(user1)
    user2_shares = gather_shares(user2)
    if request.args.get("criterion_type") == "integral":
        # Cumulative mode: each week's set also contains everything heard
        # in earlier weeks.
        def integrate_shares(shares):
            prev_week = None
            for week in sorted(shares.keys()):
                if prev_week:
                    shares[week] = set.union(shares[week], shares[prev_week])
                prev_week = week
            return shares
        user1_shares = integrate_shares(user1_shares)
        user2_shares = integrate_shares(user2_shares)
    # One row per week both users were active: positive share = unique to
    # user1, negative share = unique to user2, plus the item lists.
    data = [
        [
            date.fromtimestamp(week).strftime("%b %Y"),
            len(user1_shares[week] - user2_shares[week]) / float(len(user1_shares[week])),
            "",
            ", ".join(sorted(user1_shares[week] - user2_shares[week])),
            -len(user2_shares[week] - user1_shares[week]) / float(len(user2_shares[week])),
            "",
            ", ".join(sorted(user2_shares[week] - user1_shares[week])),
        ]
        for week in sorted(set.intersection(set(user1_shares.keys()), set(user2_shares.keys())))
    ]
    return dict(user1=user1, user2=user2, data=json.dumps(data))
def getFrontEndEPS(stock_id):
    """Return the last 20 quarters of basic EPS for a stock as JSON,
    ordered oldest quarter first."""
    period = func.concat(
        Income_Sheet.year, 'Q', Income_Sheet.season).label("Year/Season")
    rows = (db.session.query()
            .with_entities(period, Income_Sheet.基本每股盈餘)
            .filter_by(stock_id=stock_id)
            .order_by(Income_Sheet.year.desc())
            .order_by(Income_Sheet.season.desc())
            .limit(20)
            .all())
    # Query returns newest-first; present oldest-first for charting.
    data = list(reversed([row._asdict() for row in rows]))
    return jsonify(data)
def sabbaticals_ajax():
    """Serve server-side table data for the sabbaticals/exemptions report:
    faculty enrollment records where at least one role (supervisor, marker,
    presentations) is not fully enrolled, optionally filtered by project
    class."""
    pclass_filter = request.args.get('pclass_filter')
    # Records with any non-enrolled role, restricted to active users.
    base_query = db.session.query(EnrollmentRecord) \
        .filter(or_(EnrollmentRecord.supervisor_state != EnrollmentRecord.SUPERVISOR_ENROLLED,
                    EnrollmentRecord.marker_state != EnrollmentRecord.MARKER_ENROLLED,
                    EnrollmentRecord.presentations_state != EnrollmentRecord.PRESENTATIONS_ENROLLED)) \
        .join(FacultyData, FacultyData.id == EnrollmentRecord.owner_id) \
        .join(User, User.id == FacultyData.id) \
        .filter(User.active)
    if pclass_filter != 'all':
        flag, value = is_integer(pclass_filter)
        if flag:
            base_query = base_query.filter(EnrollmentRecord.pclass_id == value)
    base_query = base_query.join(ProjectClass, ProjectClass.id == EnrollmentRecord.pclass_id)
    # Fix: the name parts must be separate func.concat() arguments — the
    # previous `User.first_name + ' ' + User.last_name` emitted SQL `+`,
    # which MySQL treats as numeric addition, not concatenation. This now
    # matches the supervisor column spec used elsewhere in this module.
    name = {
        'search': func.concat(User.first_name, ' ', User.last_name),
        'order': [User.last_name, User.first_name],
        'search_collation': 'utf8_general_ci'
    }
    pclass = {'order': ProjectClass.name}
    exemptions = {
        'search': func.concat(EnrollmentRecord.supervisor_comment,
                              EnrollmentRecord.marker_comment,
                              EnrollmentRecord.presentations_comment),
        'order': [EnrollmentRecord.supervisor_reenroll,
                  EnrollmentRecord.marker_reenroll,
                  EnrollmentRecord.presentations_reenroll],
        'search_collation': 'utf8_general_ci'
    }
    columns = {'name': name, 'pclass': pclass, 'exemptions': exemptions}
    with ServerSideHandler(request, base_query, columns) as handler:
        return handler.build_payload(ajax.reports.sabbaticals)
def getFrontEndMonthRevenue(stock_id):
    """Return the last 60 months of revenue figures for a stock as JSON,
    ordered oldest month first."""
    period = func.concat(
        Month_Revenue.year, '/', Month_Revenue.month).label("Year/Month")
    rows = (db.session.query()
            .with_entities(period,
                           Month_Revenue.當月營收,
                           Month_Revenue.去年同月增減)
            .filter_by(stock_id=stock_id)
            .order_by(Month_Revenue.year.desc())
            .order_by(Month_Revenue.month.desc())
            .limit(60)
            .all())
    # Query returns newest-first; present oldest-first for charting.
    data = list(reversed([row._asdict() for row in rows]))
    return jsonify(data)
def system_utilisation_counts(systems):
    """Count systems per utilisation state for the current moment.

    Unlike the per-group variant, this returns a single flat dict keyed by
    the six utilisation states, every state initialised to zero.
    """
    states = ['recipe', 'manual', 'idle_automated', 'idle_manual',
              'idle_broken', 'idle_removed']
    counts = dict.fromkeys(states, 0)
    # A system with no open reservation is "idle_<status>"; otherwise the
    # reservation type ('recipe' or 'manual') is its state.
    state_expr = func.coalesce(
        Reservation.type,
        func.concat('idle_', func.lower(System.status)))
    rows = systems.outerjoin(System.open_reservation)\
        .with_entities(state_expr, func.count(System.id))\
        .group_by(literal_column("1"))
    for state, count in rows:
        counts[state] = count
    return counts
def getFrontEndProfitAnalysis(stock_id):
    """Return the last 20 quarters of margin ratios (gross, operating,
    pre-tax, net) for a stock as JSON, ordered oldest quarter first."""
    period = func.concat(
        Income_Sheet.year, 'Q', Income_Sheet.season).label("Year/Season")
    rows = (db.session.query()
            .with_entities(period,
                           Income_Sheet.營業毛利率,
                           Income_Sheet.營業利益率,
                           Income_Sheet.稅前淨利率,
                           Income_Sheet.本期淨利率)
            .filter_by(stock_id=stock_id)
            .order_by(Income_Sheet.year.desc())
            .order_by(Income_Sheet.season.desc())
            .limit(20)
            .all())
    # Query returns newest-first; present oldest-first for charting.
    data = list(reversed([row._asdict() for row in rows]))
    return jsonify(data)
def customersearch(db):
    """Search primary dependents whose combined name matches the search
    term and return the matches as JSON."""
    authorize()
    pattern = "%" + post_get('searchTerm') + "%"
    full_name = func.concat(Dependent.firstName, ' ', Dependent.lastName)
    matches = db.query(Dependent).filter(Dependent.isPrimary)\
        .filter(full_name.ilike(pattern))
    # Only dependents attached to a family are returned.
    payload = [dep.getDict() for dep in matches if dep.family_id is not None]
    bottle.response.content_type = 'application/json'
    return json.dumps(payload, default=json_util.default)
def upgrade():
    """Upgrade database schema and/or data, creating a new revision."""
    # New mandatory slug column on assessment_templates.
    op.add_column(
        "assessment_templates",
        sa.Column("slug", sa.String(length=250), nullable=False)
    )
    # Lightweight table construct so the data migration can reference the
    # columns without importing the ORM model.
    templates = table(
        "assessment_templates",
        column('id', sa.Integer),
        column('slug', sa.Integer)
    )
    # Backfill every row with "TEMPLATE-<id>".
    backfill = templates.update().values(
        slug=func.concat(op.inline_literal("TEMPLATE-"), templates.c.id))
    op.execute(backfill)
def getFrontEndIncomeSheet(stock_id):
    """Return the last 20 quarterly income-sheet summaries for a stock as
    JSON, ordered oldest quarter first."""
    period = func.concat(
        Income_Sheet.year, 'Q', Income_Sheet.season).label("Year/Season")
    rows = (db.session.query()
            .with_entities(period,
                           Income_Sheet.營業收入合計,
                           Income_Sheet.營業毛利,
                           Income_Sheet.營業利益,
                           Income_Sheet.稅前淨利,
                           Income_Sheet.本期淨利,
                           Income_Sheet.母公司業主淨利)
            .filter_by(stock_id=stock_id)
            .order_by(Income_Sheet.year.desc())
            .order_by(Income_Sheet.season.desc())
            .limit(20)
            .all())
    # Query returns newest-first; present oldest-first for charting.
    data = list(reversed([row._asdict() for row in rows]))
    return jsonify(data)
def meine_kaeufe():
    """Show the purchase history of the currently logged-in Nutzer.

    A Betrieb (company) account has no personal purchase history and is
    redirected away.
    """
    try:
        user_type = session["user_type"]
    except KeyError:
        # Fix: was a bare `except:` — only a missing session key should
        # fall back to the default user type.
        user_type = "nutzer"
    if user_type == "betrieb":
        return redirect(url_for('auth.zurueck'))
    else:
        session["user_type"] = "nutzer"
        # One row per purchase with the price formatted as "<x> Std.".
        kaufhistorie = db.session.query(
            Kaeufe.id, Angebote.name, Angebote.beschreibung,
            func.concat(func.round(Angebote.preis, 2), " Std.").label("preis")).\
            select_from(Kaeufe).\
            filter_by(nutzer=current_user.id).\
            join(Angebote, Kaeufe.angebot==Angebote.id).all()
        kaufh_table = KaeufeTable(kaufhistorie, no_items="(Noch keine Käufe.)")
        return render_template('meine_kaeufe.html', kaufh_table=kaufh_table)
def analytics_recommendations():
    """Recommend artists to a user: artists scrobbled by the selected users
    but never by the target user, ranked by scrobble count or by how many
    of the selected users listen to them."""
    user = db.session.query(User).get(request.args.get("user"))
    # Column headers (Russian UI): Artist, Friends, # Friends, # Scrobbles.
    table_header = ["Исполнитель", "Друзья", "Друзей", "Прослушиваний"]
    # NOTE(review): `.all()[0:1000]` fetches every matching row and slices
    # in Python — `.limit(1000)` would push the cap into SQL; confirm.
    table_body = db.session.query(
        func.concat('<a href="http://last.fm/music/', Scrobble.artist, '">', Scrobble.artist, '</a>'),
        func.group_concat(distinct(User.username).op("SEPARATOR")(literal_column('", "'))),
        func.count(distinct(User.username)),
        func.count(Scrobble.id)
    ).\
        join(User).\
        filter(
            Scrobble.user_id.in_(request.args.getlist("users")),
            # Exclude every artist the target user already listens to.
            ~Scrobble.artist.in_([a[0] for a in db.session.query(distinct(Scrobble.artist)).filter_by(user=user).all()])
        ).\
        group_by(Scrobble.artist).\
        order_by(-func.count(Scrobble.id) if request.args.get("target") == "scrobbles" else -func.count(distinct(User.username))).\
        all()[0:1000]
    return dict(title="Рекомендации для %s" % user.username, table_header=table_header, table_body=table_body)
def getFrontEndOperationExpenseAnalysis(stock_id):
    """Return the last 20 quarters of operating-expense ratios and amounts
    for a stock as JSON, ordered oldest quarter first."""
    period = func.concat(
        Income_Sheet.year, 'Q', Income_Sheet.season).label("Year/Season")
    rows = (db.session.query()
            .with_entities(period,
                           Income_Sheet.營業費用率,
                           Income_Sheet.推銷費用率,
                           Income_Sheet.管理費用率,
                           Income_Sheet.研究發展費用率,
                           Income_Sheet.營業費用,
                           Income_Sheet.推銷費用,
                           Income_Sheet.管理費用,
                           Income_Sheet.研究發展費用)
            .filter_by(stock_id=stock_id)
            .order_by(Income_Sheet.year.desc())
            .order_by(Income_Sheet.season.desc())
            .limit(20)
            .all())
    # Query returns newest-first; present oldest-first for charting.
    data = list(reversed([row._asdict() for row in rows]))
    return jsonify(data)
def count(cls, session, params, conditions=[], distinct=False):
    u"""Count rows, optionally applying DISTINCT over one or more columns.

    eg: BaseModel.count([BaseModel.id, BaseModel.XXX], [BaseModel.id==2])
        BaseModel.count(BaseModel.id, [BaseModel.id==2], True)
    """
    # NOTE(review): the mutable default `conditions=[]` is shared between
    # calls; safe only because it is never mutated here.
    if distinct:
        if isinstance(params, Iterable) and len(params) >= 2:
            # COUNT(DISTINCT CONCAT(col1, col2, ...)) over several columns.
            re = session.query(func.count(
                func.distinct(func.concat(*params))))\
                .filter(*conditions).one()[0]
        elif isinstance(params, Iterable):
            # Single-element sequence: count distinct values of that column.
            # NOTE(review): assumes `params` supports len()/indexing — a
            # generator would fail here; confirm callers always pass lists.
            qp = params[0]
            re = session.query(func.count(
                func.distinct(qp))).filter(*conditions).one()[0]
        else:
            # Bare column expression.
            re = session.query(func.count(
                func.distinct(params))).filter(*conditions).one()[0]
    else:
        # Plain COUNT over the selected columns.
        if not isinstance(params, Iterable):
            params = [params]
        re = session.query(*params).filter(*conditions).count()
    return re
def arbeit():
    """Worker administration page of a Betrieb: list current workers, list
    total hours worked per worker, and handle the "add worker" form POST."""
    # All workers currently registered with this company.
    arbeiter1 = db.session.query(Nutzer.id, Nutzer.name).\
        select_from(Arbeiter).join(Nutzer).filter(Arbeiter.betrieb==current_user.id).group_by(Nutzer.id).all()
    table1 = ArbeiterTable1(arbeiter1, no_items='(Noch keine Mitarbeiter.)')
    # Total hours worked per worker on this company's offers, formatted as
    # "<x> Std.", most hours first.
    arbeiter2 = db.session.query(Nutzer.id, Nutzer.name,
        func.concat(func.sum(Arbeit.stunden), " Std.").label('summe_stunden')).\
        select_from(Angebote).filter(Angebote.betrieb==current_user.id).\
        join(Arbeit).join(Nutzer).group_by(Nutzer.id).order_by(func.sum(Arbeit.stunden).desc()).all()
    table2 = ArbeiterTable2(arbeiter2, no_items='(Noch keine Stunden gearbeitet.)')
    fik = Betriebe.query.filter_by(id=current_user.id).first().fik
    if request.method == 'POST':
        # check if nutzer exists, if not flash warning
        if not Nutzer.query.filter_by(id=request.form['nutzer']).first():
            flash("Nutzer existiert nicht.")
            return redirect(url_for('main_betriebe.arbeit'))
        # check if nutzer is already arbeiter in betrieb
        req_arbeiter = Arbeiter.query.filter_by(
            nutzer=request.form['nutzer'], betrieb=current_user.id).first()
        # if so, flash warning
        if req_arbeiter:
            flash("Nutzer ist bereits in diesem Betrieb beschäftigt.")
        else:
            new_arbeiter = Arbeiter(nutzer=request.form['nutzer'],
                                    betrieb=current_user.id)
            db.session.add(new_arbeiter)
            db.session.commit()
        return redirect(url_for('main_betriebe.arbeit'))
    return render_template("arbeit.html", table1=table1, table2=table2, fik=fik)
(addresses.c.email_address == '*****@*****.**')) &\ ~(users.c.id > 5) print(s) s = select([(users.c.fullname + ", " + addresses.c.email_address). label('title')]).\ where(users.c.id == addresses.c.user_id).\ where(users.c.name.between('m', 'z')).\ where(or_( addresses.c.email_address.like('*****@*****.**'), addresses.c.email_address.like('*****@*****.**') )) print(s) print(func.now()) print(func.concat('x', 'y')) print(func.xyz_my_goofy_function()) print(func.current_timestamp()) s = select( [func.max(addresses.c.email_address, type_=String).label('maxemail')]) print(s) s = select([cast(users.c.id, String)]) print(s) s = select([type_coerce({'some_key': {'foo': 'bar'}}, JSON)['some_key']]) print(s.compile(dialect=mysql.dialect())) s = text( "SELECT CONCAT(users.fullname, ', ', addresses.email_address) AS title "
years = [] for i in range(2004, this_year + 1): years.append(str(i)) # FPDS if 'FPDS' in models: dap = DetachedAwardProcurement if key_type == 'award': # awards if 'award' in types: update_keys( dap, 'FPDS', key_type, 'award', years, func.concat( 'CONT_AWD_', func.coalesce(dap.piid, '-none-'), '_', func.coalesce(dap.agency_id, '-none-'), '_', func.coalesce(dap.parent_award_id, '-none-'), '_', func.coalesce(dap.referenced_idv_agency_iden, '-none-'))) # IDV if 'IDV' in types: update_keys( dap, 'FPDS', key_type, 'IDV', years, func.concat('CONT_IDV_', func.coalesce(dap.piid, '-none-'), '_', func.coalesce(dap.agency_id, '-none-'))) else: # transactions if 'award' in types: update_keys( dap, 'FPDS', key_type, 'award', years, func.concat(
class User(BaseModel, AuthUser, RbacUser, ModelUsingFiles):
    """Application user account: authentication, RBAC role resolution,
    confirmation/reset URLs, alert preferences and an optional logo file."""
    __tablename__ = 'user'
    # NOTE(review): `default=generate_uuid_str()` calls the factory at class
    # definition time — if it returns a plain string, every row shares one
    # client-side default (the server_default still generates per-row UUIDs);
    # confirm whether `default=generate_uuid_str` was intended.
    user_uuid = Column(String(36), primary_key=True,
                       default=generate_uuid_str(),
                       server_default=func.uuid_generate_v4())
    # name = Column(String(64), index=True)
    email = Column(String(128), nullable=False, unique=True, index=True)
    password = Column(String(72))
    is_admin = Column(Boolean(), default=False)
    last_login = Column(DateTime(timezone=True))
    is_active = Column(Boolean(), default=False)
    confirmed_on = Column(DateTime(True))
    created_on = Column(DateTime(True), server_default=func.now())
    # addons
    logo_file_uuid = FileModelField(FileModel, nullable=True,
                                    backend=FILE_BACKEND)
    logo = file_relationship(FileModel, 'User', 'logo_file_uuid')
    first_name = Column(String(32), nullable=True, index=True)
    last_name = Column(String(32), nullable=True, index=True)
    # alerts flags
    alert_payment_received = Column(Boolean(), server_default='true')
    alert_license_expired = Column(Boolean(), server_default='true')
    alert_license_will_expired = Column(Boolean(), server_default='true')
    alert_license_purchased = Column(Boolean(), server_default='true')
    # Legacy aliases kept for callers that still use id/name-style access.
    user_id = synonym('user_uuid')
    name = synonym('email')
    user_name = synonym('email')
    is_confirmed = column_property(confirmed_on.isnot(None))
    full_name = column_property(func.concat(first_name, ' ', last_name))

    @classmethod
    def init(cls):
        """Ensure the built-in admin and test accounts exist."""
        adm = cls.get(settings.ADMIN_UUID)
        if not adm:
            cls(user_uuid=settings.ADMIN_UUID, email=settings.ADMIN_EMAIL,
                passwd=settings.ADMIN_PWD, is_admin=True,
                is_active=True).save()
        test = cls.get(settings.TEST_USER_UUID)
        if not test:
            cls(user_uuid=settings.TEST_USER_UUID,
                email=settings.TEST_USER_EMAIL, passwd=settings.ADMIN_PWD,
                is_admin=False, is_active=True).save()

    @hybrid_property
    def passwd(self):
        # Never expose the stored hash through this accessor.
        return '********'  # self.password

    @passwd.setter
    def passwd(self, value):
        # Assigning to `passwd` stores the hash, never the plain text.
        self.password = get_hashed_password(value)

    @classmethod
    def get_user_from_credentials(cls, credentials):
        """Look up a user by email; returns None when not found.

        NOTE(review): the bare except also swallows unrelated errors and
        rolls back the session — presumably intended for NoResultFound only.
        """
        try:
            return cls.filter(
                or_(User.name == credentials['email'],
                    User.email == credentials['email'])).one()
        except:  # errors.NoResultFound:
            cls.query().session.rollback()
            return None

    def get_token_data(self):
        # Payload embedded in auth tokens for this user.
        return dict(user_uuid=self.user_uuid)

    @classmethod
    def get_user_from_token_data(cls, token_data):
        """Resolve the user referenced by a decoded token payload."""
        if 'user_uuid' in token_data:
            return cls.filter(
                User.user_uuid == token_data['user_uuid']).first()
        return None

    @classmethod
    def get_user_by_id(cls, user_uuid):
        """Fetch a user by primary key; returns None when not found."""
        try:
            return cls.filter(User.user_uuid == user_uuid).one()
        except errors.NoResultFound:  # pragma: no cover
            return None

    def can_login(self, req, resp):
        """Return True when this account may log in; otherwise write an
        unauthorized response into `resp` and return False.

        NOTE(review): everything after the first if/else is unreachable —
        the IP allow-list check below is dead code; confirm whether it was
        meant to run before the is_active early returns.
        """
        if self.is_active:
            return True
        else:
            resources.BaseResource.set_response(
                resp,
                resources.responses.
                UnAuthorizedErrorResponse(data=resources.errors.Error(
                    10000,
                    'You are not allowed to login. You must to finish confirm procedure or ask admin to activate account',
                    'account disabled')))
            return False
        request_ip = helpers.get_request_ip(req)
        if request_ip in settings.ALWAYS_ALLOWED_AUTH_IPS:
            return True
        # try:
        #     UserAuthIp.filter(UserAuthIp.ip == request_ip, UserAuthIp.user_uuid == self.user_uuid).one()
        #     return True
        # except errors.NoResultFound:
        #     if not UserAuthIp.filter(UserAuthIp.user_uuid == self.user_uuid).count():
        #         return True
        resources.BaseResource.set_response(
            resp,
            resources.responses.UnAuthorizedErrorResponse(
                data=resources.errors.Error(
                    10000,
                    'You are not allowed to login from the IP {}'.
                    format(request_ip), 'ip_not_allowed')))
        return False

    # noinspection PyMethodMayBeStatic,PyUnusedLocal
    def can_restore(self, object_revision):
        # Any revision of a user record may be restored.
        return True

    def get_id(self):
        return self.user_uuid

    def user_type(self):
        # Role name string for this account.
        return self.get_role().get_name()

    def get_role(self):
        # NOTE(review): the trailing `return RbacRoleNoRole` is unreachable.
        if self.is_admin:
            return AdminRole
        else:
            return UserRole
        return RbacRoleNoRole

    def before_save(self):
        # Keep the name/email synonyms mutually populated before persisting.
        super().before_save()
        if (not self.name) and self.email:
            self.name = self.email
        if (not self.email) and self.name:
            self.email = self.name

    def apply_mail(self, template_name):
        # Render/send the given mail template for this user.
        return _apply_mail(self, template_name, 'user')

    @property
    def reset_password_url(self):
        """Password-reset link for the user's current token, or ''."""
        if self.token:
            return '{}{}{}/auth/reset_password/{}'.format(
                settings.API_SCHEME, settings.API_HOST,
                settings.API_BASE_PATH, self.token)
        return ''

    @property
    def confirm_url(self):
        """Registration-confirmation link for the current token, or ''."""
        if self.token:
            return '{}{}{}/registration/confirm/{}'.format(
                settings.API_SCHEME, settings.API_HOST,
                settings.API_BASE_PATH, self.token)
        return ''

    @property
    def login_url(self):
        """Login endpoint URL of this deployment."""
        return '{}{}{}/auth/'.format(settings.API_SCHEME, settings.API_HOST,
                                     settings.API_BASE_PATH)
def do_search(search_query, offset, result_count, new_domains, framework_slug):
    """Execute a supplier search described by an Elasticsearch-style query
    dict against the relational store.

    Returns (page_of_supplier_dicts, total_result_count). `new_domains`
    switches between the new Domain tables and the legacy ServiceRole
    tables; full-text search uses PostgreSQL tsquery/tsvector.
    """
    # Sort direction/field, defaulting to ascending by name.
    try:
        sort_dir = list(search_query['sort'][0].values())[0]['order']
    except (KeyError, IndexError):
        sort_dir = 'asc'
    try:
        sort_by = list(search_query['sort'][0].values())[0]['sort_by']
    except (KeyError, IndexError):
        sort_by = None
    # Term filters (roles/domains, seller types).
    try:
        terms = search_query['query']['filtered']['filter']['terms']
    except (KeyError, IndexError):
        terms = {}
    roles_list = None
    seller_types_list = None
    if terms:
        # Presence of the legacy role key decides which schema to use.
        new_domains = 'prices.serviceRole.role' not in terms
        try:
            if new_domains:
                roles_list = terms['domains.assessed']
            else:
                roles = terms['prices.serviceRole.role']
                # Legacy role names carry a 7-character prefix to strip.
                roles_list = set(_['role'][7:] for _ in roles)
        except KeyError:
            pass
        try:
            seller_types_list = terms['seller_types']
        except:  # noqa
            pass
    # Free-text search term, if any.
    try:
        search_term = search_query['query']['match_phrase_prefix']['name']
    except KeyError:
        search_term = ''
    EXCLUDE_LEGACY_ROLES = not current_app.config['LEGACY_ROLE_MAPPING']
    # Base join set depends on the schema generation in use.
    if new_domains:
        q = db.session.query(Supplier).outerjoin(SupplierDomain).outerjoin(Domain) \
            .outerjoin(SupplierFramework).outerjoin(Framework)
    else:
        q = db.session.query(Supplier).outerjoin(PriceSchedule).outerjoin(ServiceRole) \
            .outerjoin(SupplierFramework).outerjoin(Framework)
    q = q.filter(
        Supplier.status != 'deleted',
        Supplier.abn != Supplier.DUMMY_ABN,
        or_(Framework.slug == framework_slug, ~Supplier.frameworks.any()))
    tsquery = None
    if search_term:
        # Choose the tsquery builder that tolerates the term's punctuation.
        if any(c in search_term for c in ['#', '-', '_', '/', '\\']):
            tsquery = func.phraseto_tsquery(search_term)
        elif ' ' in search_term:
            tsquery = func.plainto_tsquery(search_term)
        else:
            tsquery = func.to_tsquery(search_term + ":*")
        # Add a highlighted summary snippet column for matching rows.
        q = q.add_column(
            func.ts_headline(
                'english',
                func.concat(Supplier.summary, ' ',
                            Supplier.data['tools'].astext, ' ',
                            Supplier.data['methodologies'].astext, ' ',
                            Supplier.data['technologies'].astext, ''),
                tsquery,
                'MaxWords=25, MinWords=20, ShortWord=3, HighlightAll=FALSE, MaxFragments=1'
            ))
    q = q.group_by(Supplier.id)
    # Optional exact supplier-code filter.
    try:
        code = search_query['query']['term']['code']
        q = q.filter(Supplier.code == code)
    except KeyError:
        pass
    if roles_list is not None:
        if new_domains:
            if EXCLUDE_LEGACY_ROLES:
                # Supplier must cover all requested assessed domains.
                d_agg = postgres.array_agg(cast(Domain.name, TEXT))
                q = q.filter(SupplierDomain.status == 'assessed')
                q = q.having(d_agg.contains(array(roles_list)))
        else:
            # Legacy schema: compare on role names minus their prefix.
            sr_agg = postgres.array_agg(
                cast(func.substring(ServiceRole.name, 8), TEXT))
            q = q.having(sr_agg.contains(array(roles_list)))
    # 'recruiter' is a column flag, not a seller_type JSON key.
    if seller_types_list is not None and 'recruiter' in seller_types_list:
        q = q.filter(Supplier.is_recruiter == 'true')
        seller_types_list.remove('recruiter')
        if len(seller_types_list) == 0:
            seller_types_list = None
    if seller_types_list is not None:
        # Keys of the seller_type JSON object whose value is true.
        selected_seller_types = select(
            [postgres.array_agg(column('key'))],
            from_obj=func.json_each_text(Supplier.data[('seller_type', )]),
            whereclause=cast(column('value'), Boolean)).as_scalar()
        q = q.filter(selected_seller_types.contains(array(seller_types_list)))
    # Ordering: explicit sort_by wins, then sort_dir, and a text-match rank
    # is prepended when a search term is present.
    if sort_by:
        if sort_by == 'latest':
            ob = [desc(Supplier.last_update_time)]
        else:
            ob = [asc(Supplier.name)]
    else:
        if sort_dir == 'desc':
            ob = [desc(Supplier.name)]
        else:
            ob = [asc(Supplier.name)]
    if search_term:
        ob = [desc(func.ts_rank_cd(Supplier.text_vector, tsquery))] + ob
        q = q.filter(Supplier.text_vector.op('@@')(tsquery))
    q = q.order_by(*ob)
    raw_results = list(q)
    results = []
    # Rows are either bare Supplier objects or (Supplier, headline) tuples
    # when a search term added the snippet column.
    for x in range(len(raw_results)):
        if type(raw_results[x]) is Supplier:
            result = raw_results[x]
        else:
            result = raw_results[x][0]
            if raw_results[x][1] is not None and raw_results[x][1] != '':
                result.summary = raw_results[x][1]
        results.append(result)
    sliced_results = results[offset:(offset + result_count)]
    # Second query: flatten the page of suppliers with their domains.
    q = db.session.query(Supplier.code, Supplier.name, Supplier.summary,
                         Supplier.is_recruiter, Supplier.data,
                         Domain.name.label('domain_name'),
                         SupplierDomain.status.label('domain_status'))\
        .outerjoin(SupplierDomain, Domain)\
        .filter(Supplier.id.in_([sr.id for sr in sliced_results]))\
        .order_by(Supplier.name)
    suppliers = [r._asdict() for r in q]
    sliced_results = []
    # Collapse the one-row-per-domain result into one dict per supplier.
    # NOTE(review): `group.next()` is Python 2 — under Python 3 this must
    # be `next(group)`; confirm the interpreter this module targets.
    for key, group in groupby(suppliers, key=itemgetter('code')):
        supplier = group.next()
        supplier['seller_type'] = supplier.get(
            'data') and supplier['data'].get('seller_type')
        supplier['domains'] = {'assessed': [], 'unassessed': []}
        for s in chain([supplier], group):
            domain, status = s['domain_name'], s['domain_status']
            if domain:
                if status == 'assessed':
                    supplier['domains']['assessed'].append(domain)
                else:
                    supplier['domains']['unassessed'].append(domain)
        for e in ['domain_name', 'domain_status', 'data']:
            supplier.pop(e, None)
        sliced_results.append(supplier)
    return sliced_results, len(results)
def do_search(search_query, offset, result_count, new_domains, framework_slug):
    """Run a supplier search and return one page of results.

    :param search_query: elasticsearch-style dict carrying sort order,
        term filters and an optional ``match_phrase_prefix`` name search.
    :param offset: zero-based index of the first result to return.
    :param result_count: page size.
    :param new_domains: whether to search assessed domains (True) or the
        legacy service-role pricing tables (False); may be overridden by
        the filters present in ``search_query``.
    :param framework_slug: only suppliers on this framework (or with no
        framework at all) are returned.
    :return: tuple of (list of supplier dicts for this page, total hit count).
    """
    try:
        sort_dir = list(search_query['sort'][0].values())[0]['order']
    except (KeyError, IndexError):
        sort_dir = 'asc'
    try:
        sort_by = list(search_query['sort'][0].values())[0]['sort_by']
    except (KeyError, IndexError):
        sort_by = None
    try:
        terms = search_query['query']['filtered']['filter']['terms']
    except (KeyError, IndexError):
        terms = {}
    roles_list = None
    seller_types_list = None
    if terms:
        # Presence of the legacy key decides which schema we query against.
        new_domains = 'prices.serviceRole.role' not in terms
        try:
            if new_domains:
                roles_list = terms['domains.assessed']
            else:
                roles = terms['prices.serviceRole.role']
                # Legacy role names carry a 7-character prefix; strip it.
                roles_list = set(_['role'][7:] for _ in roles)
        except KeyError:
            pass
        try:
            seller_types_list = terms['seller_types']
        except KeyError:  # was a bare except; dict lookup only raises KeyError
            pass
    try:
        search_term = search_query['query']['match_phrase_prefix']['name']
    except KeyError:
        search_term = ''
    EXCLUDE_LEGACY_ROLES = not current_app.config['LEGACY_ROLE_MAPPING']
    if new_domains:
        q = db.session.query(Supplier).outerjoin(SupplierDomain).outerjoin(Domain) \
            .outerjoin(SupplierFramework).outerjoin(Framework)
    else:
        q = db.session.query(Supplier).outerjoin(PriceSchedule).outerjoin(ServiceRole) \
            .outerjoin(SupplierFramework).outerjoin(Framework)
    q = q.filter(Supplier.status != 'deleted',
                 Supplier.abn != Supplier.DUMMY_ABN,
                 or_(Framework.slug == framework_slug, ~Supplier.frameworks.any()))
    tsquery = None
    if search_term:
        # Punctuated terms need phrase parsing; otherwise prefix-match
        # single words and plain-parse multi-word input.
        if any(c in search_term for c in ['#', '-', '_', '/', '\\']):
            tsquery = func.phraseto_tsquery(search_term)
        elif ' ' in search_term:
            tsquery = func.plainto_tsquery(search_term)
        else:
            tsquery = func.to_tsquery(search_term + ":*")
        # Extra column: highlighted snippet from the supplier's free text.
        q = q.add_column(func.ts_headline(
            'english',
            func.concat(Supplier.summary, ' ',
                        Supplier.data['tools'].astext, ' ',
                        Supplier.data['methodologies'].astext, ' ',
                        Supplier.data['technologies'].astext, ''),
            tsquery,
            'MaxWords=25, MinWords=20, ShortWord=3, HighlightAll=FALSE, MaxFragments=1'
        ))
    q = q.group_by(Supplier.id)
    try:
        code = search_query['query']['term']['code']
        q = q.filter(Supplier.code == code)
    except KeyError:
        pass
    if roles_list is not None:
        if new_domains:
            if EXCLUDE_LEGACY_ROLES:
                d_agg = postgres.array_agg(cast(Domain.name, TEXT))
                q = q.having(d_agg.contains(array(roles_list)))
        else:
            sr_agg = postgres.array_agg(cast(func.substring(ServiceRole.name, 8), TEXT))
            q = q.having(sr_agg.contains(array(roles_list)))
    if seller_types_list is not None and 'recruiter' in seller_types_list:
        q = q.filter(Supplier.is_recruiter == 'true')
        seller_types_list.remove('recruiter')
        if len(seller_types_list) == 0:
            seller_types_list = None
    if seller_types_list is not None:
        # Suppliers whose truthy seller_type keys cover every requested type.
        selected_seller_types = select(
            [postgres.array_agg(column('key'))],
            from_obj=func.json_each_text(Supplier.data[('seller_type',)]),
            whereclause=cast(column('value'), Boolean)
        ).as_scalar()
        q = q.filter(selected_seller_types.contains(array(seller_types_list)))
    if sort_by:
        if sort_by == 'latest':
            ob = [desc(Supplier.last_update_time)]
        else:
            ob = [asc(Supplier.name)]
    else:
        if sort_dir == 'desc':
            ob = [desc(Supplier.name)]
        else:
            ob = [asc(Supplier.name)]
    if search_term:
        # Relevance ranking takes precedence over the requested sort.
        ob = [desc(func.ts_rank_cd(Supplier.text_vector, tsquery))] + ob
        q = q.filter(Supplier.text_vector.op('@@')(tsquery))
    q = q.order_by(*ob)
    raw_results = list(q)
    results = []
    # Rows are bare Supplier objects unless the headline column was added,
    # in which case each row is (Supplier, headline).
    for row in raw_results:
        if type(row) is Supplier:
            result = row
        else:
            result = row[0]
            if row[1] is not None and row[1] != '':
                result.summary = row[1]
        results.append(result)
    sliced_results = results[offset:(offset + result_count)]
    # Second query: fetch the page's suppliers together with their domains.
    q = db.session.query(Supplier.code, Supplier.name, Supplier.summary,
                         Supplier.is_recruiter, Supplier.data,
                         Domain.name.label('domain_name'),
                         SupplierDomain.status.label('domain_status'))\
        .outerjoin(SupplierDomain, Domain)\
        .filter(Supplier.id.in_([sr.id for sr in sliced_results]))\
        .order_by(Supplier.name)
    suppliers = [r._asdict() for r in q]
    sliced_results = []
    for key, group in groupby(suppliers, key=itemgetter('code')):
        # was group.next(): Python-2-only; next() works on 2.6+ and 3.x
        supplier = next(group)
        supplier['seller_type'] = supplier.get('data') and supplier['data'].get('seller_type')
        supplier['domains'] = {'assessed': [], 'unassessed': []}
        # Re-include the first row so its domain is not dropped.
        for s in chain([supplier], group):
            domain, status = s['domain_name'], s['domain_status']
            if domain:
                if status == 'assessed':
                    supplier['domains']['assessed'].append(domain)
                else:
                    supplier['domains']['unassessed'].append(domain)
        for e in ['domain_name', 'domain_status', 'data']:
            supplier.pop(e, None)
        sliced_results.append(supplier)
    return sliced_results, len(results)
# Make an array of years starting at 2006 and ending at this year (so it can be run at any time) this_year = datetime.datetime.now().year years = [] for i in range(2004, this_year+1): years.append(str(i)) # FPDS if 'FPDS' in models: dap = DetachedAwardProcurement # awards if 'award' in types: update_keys(dap, 'FPDS', 'award', years, func.concat( func.coalesce(dap.piid, '-none-'), '_', func.coalesce(dap.agency_id, '-none-'), '_', func.coalesce(dap.parent_award_id, '-none-'), '_', func.coalesce(dap.referenced_idv_agency_iden, '-none-'))) # IDV if 'IDV' in types: update_keys(dap, 'FPDS', 'IDV', years, func.concat('IDV_', func.coalesce(dap.piid, '-none-'), '_', func.coalesce(dap.agency_id, '-none-'))) # unpublished FABS if 'unpublishedFABS' in models: dafa = DetachedAwardFinancialAssistance # record type 1 if 'AGG' in types: update_keys(dafa, 'unpublished FABS', 'aggregate', None, func.concat('AGG_', func.coalesce(dafa.uri, '-none-'), '_',
and (addresses.email_address like :e1 or addresses.email_address like :e2) """) print conn.execute(s, x='m', y='z', e1='*****@*****.**', e2='*****@*****.**').fetchall() print '-----------------------------------' # same s = select(['users.fullname, addresses.email_address as title']).where( and_( 'users.id = addresses.user_id', 'users.name between "m" and "z"', '(addresses.email_address like :x or addresses.email_address like :y)' ) ).select_from('users, addresses') print conn.execute(s, x='*****@*****.**', y='*****@*****.**').fetchall() print '-----------------------------------' # bind own parameter s = users.select(users.c.name == bindparam('username')) print conn.execute(s, username='******').fetchall() print '-----------------------------------' s = users.select(users.c.name.like(bindparam('username', type_=String) + text("'%'"))) print conn.execute(s, username='******').fetchall() print '-----------------------------------' # fuctions print func.now() print func.concat('x', 'y') print '-----------------------------------'
def getUsers(): response = Response() # Ensure user has permission for this endpoint userId = authenticateRequest(response, request, mustBeManager=True) if response.hasError(): return response.getJson() # Ensure required input parameters are received required = [] optional = [ 'username', 'first_name', 'last_name', 'search_term', 'type', 'page_size', 'page' ] data = checkVars(response, request.values.to_dict(), required, optional) if response.hasError(): return response.getJson() # Setup database connection and table con = mimsDbEng.connect() users = Table('users', MetaData(mimsDbEng), autoload=True) # Get business of user making request stm = select([users]).where(users.c.id == userId) userBusiness = con.execute(stm).fetchone()['business'] # Setup main select statement stm = select([users]).where( and_(users.c.business == userBusiness, users.c.is_deleted == 0)) # Handle optional filters if 'username' in data: stm = stm.where(users.c.username.like('%' + data['username'] + '%')) if 'first_name' in data: stm = stm.where(users.c.first_name.like('%' + data['first_name'] + '%')) if 'last_name' in data: stm = stm.where(users.c.last_name.like('%' + data['last_name'] + '%')) if 'type' in data: stm = stm.where(users.c.type == data['type']) # Handle search_term if 'search_term' in data: search_term = '%' + data['search_term'] + '%' stm = stm.where( or_( users.c.first_name.like(search_term), users.c.last_name.like(search_term), users.c.username.like(search_term), func.concat(users.c.first_name, ' ', users.c.last_name).like(search_term))) # Handle page_size and page stm = setPagination(stm, data) response.data['users'] = resultSetToJson( con.execute(stm).fetchall(), [ 'password_hash', 'business', 'is_deleted', 'updated_datetime', 'creation_datetime' ]) con.close() for item in response.data['users']: item['id'] = int(item['id']) item['type'] = int(item['type']) return response.getJson()
def update_sokn(): if not set(["sysla", "sokn", "baer"]).issubset(set(request.json.keys())): abort(400) syslur = [{"value": "", "text": ""}] baeir = [{"value": "", "text": ""}] # Sysla sysla_query = (db.session.query( models.Danarbu.sysla_heiti).distinct().order_by( models.Danarbu.sysla_heiti)) if len(request.json["sokn"]) > 0: sysla_query = sysla_query.filter( models.Danarbu.sokn_heiti == request.json["sokn"].split(":")[0], models.Danarbu.sysla_heiti == request.json["sokn"].split(":")[1], ) # Baer baer_query = (db.session.query( func.concat( models.Danarbu.baer_heiti, " - ", models.Danarbu.sokn_heiti, " - ", models.Danarbu.sysla_heiti, ).label("svikakisa"), models.Danarbu.baer_heiti, models.Danarbu.sokn_heiti, models.Danarbu.sysla_heiti, ).distinct().order_by("svikakisa")) if len(request.json["sokn"]) > 0: baer_query = baer_query.filter( models.Danarbu.sokn_heiti == request.json["sokn"].split(":")[0], models.Danarbu.sysla_heiti == request.json["sokn"].split(":")[1], ) for sysla in sysla_query.all(): syslur.append({"value": sysla, "text": sysla}) for ( svikakisa, baer, sokn, sysla, ) in baer_query.all(): baeir.append({ "value": ":".join([baer, sokn, sysla]), "text": svikakisa }) sysla_value = (request.json["sokn"].split(":")[1] if len(request.json["sokn"]) > 0 else request.json["sysla"]) baer_value = (request.json["baer"] if len(request.json["baer"]) > 0 and request.json["baer"].split(":")[1] == request.json["sokn"].split(":")[0] and request.json["baer"].endswith( request.json["sokn"].split(":")[1]) else "") return jsonify({ "syslur": syslur, "baeir": baeir, "sysla_value": sysla_value, "baer_value": baer_value, })
def test_func(): print func.now() print func.concat('x', 'y') print func.current_timestamp()
def casestudies_search():
    """Search case studies, returning an elasticsearch-shaped JSON payload.

    Reads sort/filter/search parameters from the JSON body, pagination
    from the query string. Filters by supplier domains and seller types,
    ranks full-text matches, and returns
    ``{'hits': {'total': N, 'hits': [{'_source': ...}, ...]}}``.
    """
    search_query = get_json_from_request()
    offset = get_nonnegative_int_or_400(request.args, 'from', 0)
    result_count = get_positive_int_or_400(
        request.args, 'size', current_app.config['DM_API_SUPPLIERS_PAGE_SIZE'])
    sort_dir = search_query.get('sort_dir', 'asc')
    # (removed unused local: search_query.get('sort_by') was never read)
    domains = search_query.get('domains', None)
    seller_types = search_query.get('seller_types', None)
    search_term = search_query.get('search_term', None)
    framework_slug = request.args.get('framework', 'digital-marketplace')
    q = db.session.query(CaseStudy).join(Supplier).outerjoin(SupplierDomain).outerjoin(Domain) \
        .outerjoin(SupplierFramework).outerjoin(Framework)
    q = q.filter(
        Supplier.status != 'deleted',
        or_(Framework.slug == framework_slug, ~Supplier.frameworks.any()))
    tsquery = None
    if search_term:
        # Prefix-match single words; plain-parse multi-word input.
        if ' ' in search_term:
            tsquery = func.plainto_tsquery(search_term)
        else:
            tsquery = func.to_tsquery(search_term + ":*")
        # Extra column: highlighted snippet from approach/role text.
        q = q.add_column(
            func.ts_headline(
                'english',
                func.concat(CaseStudy.data['approach'].astext, ' ',
                            CaseStudy.data['role'].astext),
                tsquery,
                'MaxWords=150, MinWords=75, ShortWord=3, HighlightAll=FALSE, FragmentDelimiter=" ... " '
            ))
    else:
        q = q.add_column("''")
    q = q.add_column(Supplier.name)
    q = q.add_column(postgres.array_agg(Supplier.data))
    q = q.group_by(CaseStudy.id, Supplier.name)
    if domains:
        # Supplier must cover every requested domain.
        d_agg = postgres.array_agg(cast(Domain.name, TEXT))
        q = q.having(d_agg.contains(array(domains)))
    if seller_types:
        # Truthy seller_type keys must cover every requested type.
        selected_seller_types = select(
            [postgres.array_agg(column('key'))],
            from_obj=func.json_each_text(Supplier.data[('seller_type', )]),
            whereclause=cast(column('value'), Boolean)).as_scalar()
        q = q.filter(selected_seller_types.contains(array(seller_types)))
    if sort_dir in ('desc', 'z-a'):
        ob = [desc(CaseStudy.data['title'].astext)]
    else:
        ob = [asc(CaseStudy.data['title'].astext)]
    if search_term:
        # Build the searchable document once; reuse for ranking and matching.
        full_text = func.to_tsvector(
            func.concat(Supplier.name,
                        CaseStudy.data['title'].astext,
                        CaseStudy.data['approach'].astext))
        ob = [desc(func.ts_rank_cd(full_text, tsquery))] + ob
        q = q.filter(full_text.op('@@')(tsquery))
    q = q.order_by(*ob)
    raw_results = list(q)
    results = []
    # Each row: (CaseStudy, headline, supplier name, array_agg of data).
    for row in raw_results:
        result = row[0].serialize()
        if row[1] is not None and row[1] != '':
            result['approach'] = row[1]
        if row[2] is not None:
            result['supplierName'] = row[2]
        if row[3] is not None and row[3][0] is not None:
            result['seller_type'] = row[3][0].get('seller_type')
        results.append(result)
    total_results = len(results)
    sliced_results = results[offset:(offset + result_count)]
    result = {
        'hits': {
            'total': total_results,
            'hits': [{'_source': r} for r in sliced_results]
        }
    }
    try:
        response = jsonify(result), 200
    except Exception as e:
        response = jsonify(message=str(e)), 500
    return response
# Make an array of years starting at 2006 and ending at this year (so it can be run at any time) this_year = datetime.datetime.now().year years = [] for i in range(2004, this_year+1): years.append(str(i)) # FPDS if 'FPDS' in models: dap = DetachedAwardProcurement # awards if 'award' in types: update_keys(dap, 'FPDS', 'award', years, func.concat('CONT_AWD_', func.coalesce(dap.piid, '-none-'), '_', func.coalesce(dap.agency_id, '-none-'), '_', func.coalesce(dap.parent_award_id, '-none-'), '_', func.coalesce(dap.referenced_idv_agency_iden, '-none-'))) # IDV if 'IDV' in types: update_keys(dap, 'FPDS', 'IDV', years, func.concat('CONT_IDV_', func.coalesce(dap.piid, '-none-'), '_', func.coalesce(dap.agency_id, '-none-'))) # unpublished FABS if 'unpublishedFABS' in models: dafa = DetachedAwardFinancialAssistance # record type 1 if 'AGG' in types: update_keys(dafa, 'unpublished FABS', 'aggregate', None, func.concat('ASST_AGG_', func.coalesce(dafa.uri, '-none-'), '_',
def year_groups_ajax():
    """AJAX data source for the year-group reports table.

    Reads year/cohort/programme/type filters from the query string,
    builds the matching StudentData query, and hands it to
    ServerSideHandler for server-side paging/sorting/searching.
    """
    year_filter = request.args.get('year_filter')
    cohort_filter = request.args.get('cohort_filter')
    prog_filter = request.args.get('prog_filter')
    type_filter = request.args.get('type_filter')

    if year_filter not in ['all', '1', '2', '3', '4', 'twd']:
        year_filter = 'all'
    flag, value = is_integer(year_filter)

    # The three variants previously duplicated this join chain verbatim;
    # only the filter clause differs, so build the joins once.
    base_query = db.session.query(StudentData) \
        .join(User, User.id == StudentData.id) \
        .join(DegreeProgramme, DegreeProgramme.id == StudentData.programme_id) \
        .join(DegreeType, DegreeType.id == DegreeProgramme.type_id)

    if year_filter == 'twd':
        # "Temporarily withdrawn": active users flagged as intermitting.
        base_query = base_query.filter(User.active, StudentData.intermitting)
    elif year_filter == 'all' or not flag:
        base_query = base_query.filter(
            and_(User.active,
                 StudentData.academic_year <= DegreeType.duration))
    else:
        base_query = base_query.filter(
            and_(User.active,
                 StudentData.academic_year <= DegreeType.duration,
                 StudentData.academic_year == value))

    name = {
        'search': func.concat(User.first_name, ' ', User.last_name),
        'order': [User.last_name, User.first_name],
        'search_collation': 'utf8_general_ci'
    }
    programme = {'order': DegreeProgramme.name}
    columns = {'name': name, 'programme': programme}

    # Optional integer-valued filters, applied only when they parse.
    for raw, col in ((cohort_filter, StudentData.cohort),
                     (prog_filter, DegreeProgramme.id),
                     (type_filter, DegreeType.id)):
        if raw is not None:
            flag, value = is_integer(raw)
            if flag:
                base_query = base_query.filter(col == value)

    with ServerSideHandler(request, base_query, columns) as handler:
        return handler.build_payload(ajax.reports.year_groups)
async def get_full_employee_name(): """Return concat function with full employee name.""" blank = text("' '") full_name = [employees.c.title_of_courtesy, blank, employees.c.first_name, blank, employees.c.last_name] return func.concat(*full_name).label('employee')
def analytics_query(self, start, end, locations=None, library=None):
    """Build a database query that fetches rows of analytics data.

    This method uses low-level SQLAlchemy code to do all
    calculations and data conversions in the database. It's modeled
    after Work.to_search_documents, which generates a large JSON
    document entirely in the database.

    :param start: include events starting at or after this time.
    :param end: include events starting strictly before this time.
    :param locations: optional comma-separated string of event locations;
        when given, results are also restricted to checkout/fulfill/
        open-book event types.
    :param library: optional library to restrict events to.
    :return: An iterator of results, each of which can be written
        directly to a CSV file.
    """
    # WHERE clauses shared by the main events query.
    clauses = [
        CirculationEvent.start >= start,
        CirculationEvent.start < end,
    ]
    if locations:
        event_types = [
            CirculationEvent.CM_CHECKOUT,
            CirculationEvent.CM_FULFILL,
            CirculationEvent.OPEN_BOOK,
        ]
        locations = locations.strip().split(",")
        clauses += [
            CirculationEvent.type.in_(event_types),
            CirculationEvent.location.in_(locations),
        ]
    if library:
        clauses += [CirculationEvent.library == library]
    # Build the primary query. This is a query against the
    # CirculationEvent table and a few other tables joined against
    # it. This makes up the bulk of the data.
    events_alias = (
        select(
            [
                func.to_char(CirculationEvent.start, "YYYY-MM-DD HH24:MI:SS").label(
                    "start"
                ),
                CirculationEvent.type.label("event_type"),
                Identifier.identifier,
                Identifier.type.label("identifier_type"),
                Edition.sort_title,
                Edition.sort_author,
                case(
                    [(Work.fiction == True, literal_column("'fiction'"))],
                    else_=literal_column("'nonfiction'"),
                ).label("fiction"),
                Work.id.label("work_id"),
                Work.audience,
                Edition.publisher,
                Edition.imprint,
                Edition.language,
                CirculationEvent.location,
            ],
        )
        .select_from(
            join(
                CirculationEvent,
                LicensePool,
                CirculationEvent.license_pool_id == LicensePool.id,
            )
            .join(Identifier, LicensePool.identifier_id == Identifier.id)
            .join(Work, Work.id == LicensePool.work_id)
            .join(Edition, Work.presentation_edition_id == Edition.id)
        )
        .where(and_(*clauses))
        .order_by(CirculationEvent.start.asc())
        .alias("events_alias")
    )
    # A subquery can hook into the main query by referencing its
    # 'work_id' field in its WHERE clause.
    work_id_column = literal_column(
        events_alias.name + "." + events_alias.c.work_id.name
    )
    # This subquery gets the names of a Work's genres as a single
    # comma-separated string.
    #
    # This Alias selects some number of rows, each containing one
    # string column (Genre.name). Genres with higher affinities with
    # this work go first.
    genres_alias = (
        select([Genre.name.label("genre_name")])
        .select_from(join(WorkGenre, Genre, WorkGenre.genre_id == Genre.id))
        .where(WorkGenre.work_id == work_id_column)
        .order_by(WorkGenre.affinity.desc(), Genre.name)
        .alias("genres_subquery")
    )
    # Use array_agg() to consolidate the rows into one row -- this
    # gives us a single value, an array of strings, for each
    # Work. Then use array_to_string to convert the array into a
    # single comma-separated string.
    genres = select(
        [func.array_to_string(func.array_agg(genres_alias.c.genre_name), ",")]
    ).select_from(genres_alias)
    # This subquery gets the a Work's target age as a single string.
    #
    # This Alias selects two fields: the lower and upper bounds of
    # the Work's target age. This reuses code originally written
    # for Work.to_search_documents().
    target_age = Work.target_age_query(work_id_column).alias("target_age_subquery")
    # Concatenate the lower and upper bounds with a dash in the
    # middle. If both lower and upper bound are empty, just give
    # the empty string. This simulates the behavior of
    # Work.target_age_string.
    target_age_string = select(
        [
            case(
                [
                    (
                        or_(target_age.c.lower != None, target_age.c.upper != None),
                        func.concat(target_age.c.lower, "-", target_age.c.upper),
                    )
                ],
                else_=literal_column("''"),
            )
        ]
    ).select_from(target_age)
    # Build the main query out of the subqueries.
    events = events_alias.c
    query = select(
        [
            events.start,
            events.event_type,
            events.identifier,
            events.identifier_type,
            events.sort_title,
            events.sort_author,
            events.fiction,
            events.audience,
            events.publisher,
            events.imprint,
            events.language,
            target_age_string.label("target_age"),
            genres.label("genres"),
            events.location,
        ]
    ).select_from(events_alias)
    return query
where users.id = addresses.user_id and users.name between :x and :y and (addresses.email_address like :e1 or addresses.email_address like :e2) """) print conn.execute(s, x='m', y='z', e1='*****@*****.**', e2='*****@*****.**').fetchall() print '-----------------------------------' # same s = select(['users.fullname, addresses.email_address as title']).where( and_( 'users.id = addresses.user_id', 'users.name between "m" and "z"', '(addresses.email_address like :x or addresses.email_address like :y)') ).select_from('users, addresses') print conn.execute(s, x='*****@*****.**', y='*****@*****.**').fetchall() print '-----------------------------------' # bind own parameter s = users.select(users.c.name == bindparam('username')) print conn.execute(s, username='******').fetchall() print '-----------------------------------' s = users.select( users.c.name.like(bindparam('username', type_=String) + text("'%'"))) print conn.execute(s, username='******').fetchall() print '-----------------------------------' # fuctions print func.now() print func.concat('x', 'y') print '-----------------------------------'
def position(cls): prop = hybrid_property(lambda self: str(self)) prop.expression( lambda self: func.concat(Contig.name, ':', self.start)) return prop
def create_ssb_dropdown(search_form):
    """Build the (value, label) option lists for the sysla/sokn/baer dropdowns.

    Each list starts with an empty option. Values are ":"-joined name
    components; labels join the same components with " - ". The more
    specific selection on *search_form* (baer over sokn over sysla)
    narrows the other two lists so the dropdowns stay consistent.

    :param search_form: form whose sysla_select/sokn_select/baer_select
        fields may carry a current selection (checked via is_relevant).
    :return: tuple (syslur, soknir, baeir) of option lists.
    """
    syslur = [("", "")]
    soknir = [("", "")]
    baeir = [("", "")]
    # Sysla: distinct county names, narrowed by the chosen baer
    # ("baer:sokn:sysla") or, failing that, the chosen sokn ("sokn:sysla").
    sysla_query = (db.session.query(
        models.Danarbu.sysla_heiti).distinct().order_by(
            models.Danarbu.sysla_heiti))
    if is_relevant(search_form.baer_select):
        sysla_query = sysla_query.filter(
            models.Danarbu.sysla_heiti == search_form.baer_select.data.split(
                ":")[2])
    elif is_relevant(search_form.sokn_select):
        sysla_query = sysla_query.filter(
            models.Danarbu.sysla_heiti == search_form.sokn_select.data.split(
                ":")[1])
    for (sysla, ) in sysla_query.all():
        syslur.append((sysla, sysla))
    # Sokn: labelled "sokn - sysla", narrowed by baer or sysla selection.
    sokn_query = (db.session.query(
        func.concat(models.Danarbu.sokn_heiti, " - ",
                    models.Danarbu.sysla_heiti).label("svikakisa"),
        models.Danarbu.sokn_heiti,
        models.Danarbu.sysla_heiti,
    ).distinct().order_by("svikakisa"))
    if is_relevant(search_form.baer_select):
        sokn_query = sokn_query.filter(
            models.Danarbu.sokn_heiti == search_form.baer_select.data.split(
                ":")[1],
            models.Danarbu.sysla_heiti == search_form.baer_select.data.split(
                ":")[2],
        )
    elif is_relevant(search_form.sysla_select):
        sokn_query = sokn_query.filter(
            models.Danarbu.sysla_heiti == search_form.sysla_select.data)
    for (
            svikakisa,
            sokn,
            sysla,
    ) in sokn_query.all():
        soknir.append((":".join([sokn, sysla]), svikakisa))
    # Baer: labelled "baer - sokn - sysla", narrowed by sokn or sysla.
    baer_query = (db.session.query(
        func.concat(
            models.Danarbu.baer_heiti,
            " - ",
            models.Danarbu.sokn_heiti,
            " - ",
            models.Danarbu.sysla_heiti,
        ).label("svikakisa"),
        models.Danarbu.baer_heiti,
        models.Danarbu.sokn_heiti,
        models.Danarbu.sysla_heiti,
    ).distinct().order_by("svikakisa"))
    if is_relevant(search_form.sokn_select):
        baer_query = baer_query.filter(
            models.Danarbu.sokn_heiti == search_form.sokn_select.data.split(
                ":")[0],
            models.Danarbu.sysla_heiti == search_form.sokn_select.data.split(
                ":")[1],
        )
    elif is_relevant(search_form.sysla_select):
        baer_query = baer_query.filter(
            models.Danarbu.sysla_heiti == search_form.sysla_select.data)
    for (
            svikakisa,
            baer,
            sokn,
            sysla,
    ) in baer_query.all():
        baeir.append((":".join([baer, sokn, sysla]), svikakisa))
    return syslur, soknir, baeir
def casestudies_search():
    """Search case studies, returning an elasticsearch-shaped JSON payload.

    Reads sort/filter/search parameters from the JSON body, pagination
    from the query string. Filters by supplier domains and seller types,
    ranks full-text matches, and returns
    ``{'hits': {'total': N, 'hits': [{'_source': ...}, ...]}}``.
    """
    search_query = get_json_from_request()
    offset = get_nonnegative_int_or_400(request.args, 'from', 0)
    result_count = get_positive_int_or_400(
        request.args, 'size', current_app.config['DM_API_SUPPLIERS_PAGE_SIZE'])
    sort_dir = search_query.get('sort_dir', 'asc')
    # (removed unused local: search_query.get('sort_by') was never read)
    domains = search_query.get('domains', None)
    seller_types = search_query.get('seller_types', None)
    search_term = search_query.get('search_term', None)
    framework_slug = request.args.get('framework', 'digital-marketplace')
    q = db.session.query(CaseStudy).join(Supplier).outerjoin(SupplierDomain).outerjoin(Domain) \
        .outerjoin(SupplierFramework).outerjoin(Framework)
    q = q.filter(
        Supplier.status != 'deleted',
        or_(Framework.slug == framework_slug, ~Supplier.frameworks.any()))
    tsquery = None
    if search_term:
        # Prefix-match single words; plain-parse multi-word input.
        if ' ' in search_term:
            tsquery = func.plainto_tsquery(search_term)
        else:
            tsquery = func.to_tsquery(search_term + ":*")
        # Extra column: highlighted snippet from approach/role text.
        q = q.add_column(
            func.ts_headline(
                'english',
                func.concat(CaseStudy.data['approach'].astext, ' ',
                            CaseStudy.data['role'].astext),
                tsquery,
                'MaxWords=150, MinWords=75, ShortWord=3, HighlightAll=FALSE, FragmentDelimiter=" ... " '
            ))
    else:
        q = q.add_column("''")
    q = q.add_column(Supplier.name)
    q = q.add_column(postgres.array_agg(Supplier.data))
    q = q.group_by(CaseStudy.id, Supplier.name)
    if domains:
        # Supplier must cover every requested domain.
        d_agg = postgres.array_agg(cast(Domain.name, TEXT))
        q = q.having(d_agg.contains(array(domains)))
    if seller_types:
        # Truthy seller_type keys must cover every requested type.
        selected_seller_types = select(
            [postgres.array_agg(column('key'))],
            from_obj=func.json_each_text(Supplier.data[('seller_type',)]),
            whereclause=cast(column('value'), Boolean)
        ).as_scalar()
        q = q.filter(selected_seller_types.contains(array(seller_types)))
    if sort_dir in ('desc', 'z-a'):
        ob = [desc(CaseStudy.data['title'].astext)]
    else:
        ob = [asc(CaseStudy.data['title'].astext)]
    if search_term:
        # Build the searchable document once; reuse for ranking and matching.
        full_text = func.to_tsvector(
            func.concat(Supplier.name,
                        CaseStudy.data['title'].astext,
                        CaseStudy.data['approach'].astext))
        ob = [desc(func.ts_rank_cd(full_text, tsquery))] + ob
        q = q.filter(full_text.op('@@')(tsquery))
    q = q.order_by(*ob)
    raw_results = list(q)
    results = []
    # Each row: (CaseStudy, headline, supplier name, array_agg of data).
    for row in raw_results:
        result = row[0].serialize()
        if row[1] is not None and row[1] != '':
            result['approach'] = row[1]
        if row[2] is not None:
            result['supplierName'] = row[2]
        if row[3] is not None and row[3][0] is not None:
            result['seller_type'] = row[3][0].get('seller_type')
        results.append(result)
    total_results = len(results)
    sliced_results = results[offset:(offset + result_count)]
    result = {
        'hits': {
            'total': total_results,
            'hits': [{'_source': r} for r in sliced_results]
        }
    }
    try:
        response = jsonify(result), 200
    except Exception as e:
        response = jsonify(message=str(e)), 500
    return response