def get(self, bag):
    """Return a single company document by id.

    Selects the current (non-deleted) revision of the company and, when
    ``bag['with_related']`` is True, attaches related data as extra result
    columns: enum rows for status/type, the entry user, type of ownership,
    country, coate, roles and the company's employees.

    :param bag: request payload; must contain 'id', may contain
        'with_related' (bool).
    :return: {'doc': serialized company row}
    :raises sqlalchemy.orm.exc.NoResultFound: if no matching company exists.
    """
    query = g.tran.query(db.Companies) \
        .filter_by(_deleted='infinity', _id=bag['id'])
    # Expose every mapped column of Companies as an explicit result column.
    doc_vars = vars(db.Companies)
    for var in doc_vars:
        if isinstance(doc_vars[var], InstrumentedAttribute):
            query = query.add_column(doc_vars[var])
    if 'with_related' in bag and bag['with_related'] is True:
        # Correlated scalar subqueries that resolve enum keys to full rows.
        company_status_value = g.tran.query(
            func.row_to_json(text('enums.*'))).select_from(db.Enums) \
            .filter_by(_deleted='infinity', name='company_status') \
            .filter(db.Enums.data['key'].cast(TEXT) == cast(db.Companies.company_status, TEXT)) \
            .as_scalar().label('company_status_value')
        company_type_value = g.tran.query(
            func.row_to_json(text('enums.*'))).select_from(db.Enums) \
            .filter_by(_deleted='infinity', name='company_type') \
            .filter(db.Enums.data['key'].cast(TEXT) == cast(db.Companies.company_type, TEXT)) \
            .as_scalar().label('company_type_value')
        entry_user = g.tran.query(func.json_build_object(
            "id", db.User.id, "username", db.User.username, "email", db.User.email,
            "rec_date", db.User.rec_date, "data", db.User.data,
            "role", db.User.role)).select_from(db.User).filter_by(
            id=db.Companies.entry_user_id) \
            .as_scalar().label('entry_user')
        typeofownership = g.tran.query(func.row_to_json(text('typeofownership.*'))).select_from(db.Typeofownership) \
            .filter_by(_deleted='infinity', _id=db.Companies.typeofownership_id).as_scalar() \
            .label('typeofownership')
        dircountry = g.tran.query(func.row_to_json(text('dircountry.*'))).select_from(db.DirCountry) \
            .filter_by(_deleted='infinity', _id=db.Companies.dircountry_id).as_scalar() \
            .label('dircountry')
        dircoate = g.tran.query(func.row_to_json(text('dircoate.*'))).select_from(db.DirCoate) \
            .filter_by(_deleted='infinity', _id=db.Companies.dircoate_id).as_scalar() \
            .label('dircoate')
        roles = g.tran.query(func.jsonb_agg(func.row_to_json(text('roles.*')))).select_from(db.Roles) \
            .filter_by(_deleted='infinity') \
            .filter(type_coerce(db.Companies.roles_id).has_any(array([db.Roles._id]))) \
            .as_scalar().label('roles')
        # BUG FIX: the fetched employee rows were immediately overwritten by
        # `company_users = []`, so the loop below never executed and the
        # related-users list was always empty.  Keep the rows under their
        # own name and iterate them.
        employee_rows = g.tran.query(db.Companyemployees.user_id) \
            .filter_by(_deleted='infinity', company_id=bag['id']).all()
        company_users = []
        for row in employee_rows:
            user = g.tran.query(func.json_build_object(
                "id", db.User.id, "username", db.User.username, "email", db.User.email,
                "rec_date", db.User.rec_date, "data", db.User.data,
                "role", db.User.role)).select_from(db.User) \
                .filter_by(id=row.user_id).first()
            company_users.append(user)
        # NOTE(review): company_users is a plain Python list handed to
        # add_columns(); confirm the ORM layer accepts it as a literal value.
        query = query.add_columns(company_status_value, company_type_value, entry_user, company_users,
                                  typeofownership, dircountry, roles, dircoate)
    company = query.one()
    return {'doc': orm_to_json(company)}
def find_relations(query):
    """Attach related rows to an Operations query as labelled scalar subqueries.

    Each subquery correlates against a ``db.Operations`` column and resolves
    the referenced row (branch, purpose, document, product, unit, currency,
    status enum, employee, contractor, source branch, entry user) to JSON.

    :param query: Operations query to extend.
    :return: the query with the extra columns added.
    """
    branch = g.tran.query(func.row_to_json(text('branches.*'))).select_from(db.Branches) \
        .filter_by(_deleted='infinity', _id=db.Operations.branch_id).as_scalar().label('branch')
    purpose = g.tran.query(func.row_to_json(text('costcenterpurposes.*'))).select_from(db.CostCenterPurposes) \
        .filter_by(_deleted='infinity', _id=db.Operations.purpose_id).as_scalar().label('purpose')
    document = g.tran.query(func.row_to_json(text('document.*'))).select_from(db.Document) \
        .filter_by(_deleted='infinity', _id=db.Operations.document_id).as_scalar().label('document')
    product = g.tran.query(func.row_to_json(text('products.*'))).select_from(db.Products) \
        .filter_by(_deleted='infinity', _id=db.Operations.product_id).as_scalar().label('product')
    unit = g.tran.query(func.row_to_json(text('units.*'))).select_from(db.Units) \
        .filter_by(_deleted='infinity', _id=db.Operations.unit_id).as_scalar().label('unit')
    currency = g.tran.query(func.row_to_json(text('currencies.*'))).select_from(db.Currencies) \
        .filter_by(_deleted='infinity', _id=db.Operations.currency_id).as_scalar().label('currency')
    # Resolve the operation_status enum key to its full enum row.
    operation_status = g.tran.query(func.row_to_json(text('enums.*'))).select_from(db.Enums) \
        .filter_by(_deleted='infinity', name='operation_status') \
        .filter(db.Enums.data['key'].cast(TEXT) == cast(db.Operations.operation_status, TEXT)) \
        .as_scalar().label('operation_status_value')
    # Nested subquery: the user row embedded inside the employee JSON below.
    user = g.tran.query(func.json_build_object(
        "id", db.User.id, "username", db.User.username, "email", db.User.email,
        "rec_date", db.User.rec_date, "data", db.User.data,
        "role", db.User.role)).select_from(db.User).filter_by(id=db.UserCompany.user_id) \
        .as_scalar().label('user')
    employee = g.tran.query(func.json_build_object('_id', db.UserCompany._id, '_rev', db.UserCompany._rev,
                                                   '_created', db.UserCompany._created,
                                                   'user_id', db.UserCompany.user_id,
                                                   'company_id', db.UserCompany.company_id,
                                                   'entry_user_id', db.UserCompany.entry_user_id,
                                                   'branches_id', db.UserCompany.branches_id,
                                                   'roles_id', db.UserCompany.roles_id,
                                                   'access', db.UserCompany.access,
                                                   'user', user)).select_from(db.UserCompany) \
        .filter_by(_deleted='infinity', company_id=db.Operations.company_id, user_id=db.Operations.employee_id)\
        .as_scalar().label('employee')
    contractor = g.tran.query(func.row_to_json(text('contractors.*'))).select_from(db.Contractors) \
        .filter_by(_deleted='infinity', _id=db.Operations.contractor_id).as_scalar().label('contractor')
    # Second branch lookup: the operation's source branch.
    branch2 = g.tran.query(func.row_to_json(text('branches.*'))).select_from(db.Branches) \
        .filter_by(_deleted='infinity', _id=db.Operations.from_branch_id).as_scalar().label('branch2')
    entry_user = g.tran.query(func.json_build_object(
        "id", db.User.id, "username", db.User.username, "email", db.User.email,
        "rec_date", db.User.rec_date, "data", db.User.data,
        "role", db.User.role)).select_from(db.User).filter_by(id=db.Operations.entry_user_id)\
        .as_scalar().label('entry_user')
    query = query.add_columns(branch, purpose, document, product, unit, currency, contractor, branch2,
                              entry_user, employee, operation_status)
    return query
def update_service(services_id, user_connected_model, user_connected_schema):
    """Update one of the connected artist's services by id."""
    artist = user_connected_model
    target = artist.services.filter_by(id=services_id).first()
    if not target:
        # NOTE: original contract returns 200 (not 404) for a missing service.
        return custom_response("service not found", 200)
    data, error = validate_data(service_schema, request, return_dict=False)
    if error:
        return custom_response(data, 400)
    uploaded = check_galleries_files(request, artist)
    if uploaded:
        # Merge freshly uploaded gallery links with the submitted ones.
        data['galleries'] = list(set(data.get("galleries", []) + uploaded))
    if data['special_dates']:
        data['special_dates'] = func.json_build_object(
            *convert_dict_to_sql_json(data['special_dates']))
    # Remove stored images that are no longer referenced by the new payload.
    for link in target.galleries:
        if link not in data['galleries']:
            destroy_image(link, CLOUD_IMAGES_SERVICES_TYPE,
                          artist.fileStorage_key, artist.id)
    target.update(data)
    return custom_response(
        return_services(service_schema.dump(target), target), 200)
def jsonify_teams(query):
    """Collapse a self-joined team query into one JSON blob per top-level team.

    Args:
        query (flask_sqlalchemy.BaseQuery): self-joining query whose columns
            come in (id_i, name_i) pairs, deepest nesting level last.

    Returns:
        flask_sqlalchemy.BaseQuery: query yielding id / name / subteams rows.
    """
    depth = int(len(query.first()) / 2)
    blob = None
    # Fold the deepest level into its parent, one level per iteration.
    for level in range(depth, 1, -1):
        parent_ids = [column(f'id_{j}') for j in range(1, level)]
        parent_names = [column(f'name_{j}') for j in range(1, level)]
        pairs = ['id', column(f'id_{level}'), 'name', column(f'name_{level}')]
        if blob is not None:
            # Deeper levels already produced a 'subteams' column; carry it up.
            pairs += ['subteams', column('subteams')]
        blob = func.json_strip_nulls(func.json_build_object(*pairs))
        group_cols = parent_ids + parent_names
        query = query.from_self(func.array_agg(blob).label('subteams'),
                                *group_cols).group_by(*group_cols)
    return query.from_self(column('id_1').label('id'),
                           column('name_1').label('name'),
                           column('subteams'))
def get_clusters_per_point_query(self, session, gridpoints, riskdate):
    """Build a query that, per grid point, aggregates nearby recent cases.

    For each point produced by ``get_points_query_from_grid`` the query
    collects the cases reported within the configured temporal window ending
    at ``riskdate`` and lying within ``spatial_domain`` of the point.

    :param session: SQLAlchemy session.
    :param gridpoints: grid points to evaluate.
    :param riskdate: inclusive end date of the temporal window.
    :return: query yielding (case_array, point) rows, grouped by point.
    """
    # Removed a block of commented-out exploratory query code (dead code).
    days_prev = self.dycast_parameters.temporal_domain
    enddate = riskdate
    startdate = riskdate - datetime.timedelta(days=days_prev)
    points_query = self.get_points_query_from_grid(gridpoints)
    # join(..., literal(True)) is a cross join: pair every case with every
    # grid point, then filter down by date window and spatial distance.
    return session.query(func.array_agg(
        func.json_build_object(
            Case.id,
            Case.location
        )).label('case_array'),
        points_query.c.point.geom.label('point')) \
        .join(points_query, literal(True)) \
        .filter(Case.report_date >= startdate,
                Case.report_date <= enddate,
                func.ST_DWithin(Case.location,
                                points_query.c.point.geom,
                                self.dycast_parameters.spatial_domain)) \
        .group_by(points_query.c.point.geom)
def get_fields_query(self):
    """Build the flat "fields" select for sessions, used for indexing/export.

    Produces one row per session with constant _type/_index columns, all
    Session columns except 'timespan' and 'parameters', the owner's email,
    and aggregated errors / warnings / labels / metadata as JSON columns.
    """
    # Distinct (session_id, label_id) pairs, correlated to the outer Session.
    labels_query = select([
        models.session_label.c.session_id, models.session_label.c.label_id
    ]).where(models.session_label.c.session_id == models.Session.id).distinct().correlate(
        models.Session).alias()
    return select([
        # Constant discriminator columns for the consumer (e.g. a search index).
        label("_type", text("'session'")),
        label("_index", text("'session'")),
        label("_id", models.Session.id),
        *[
            getattr(models.Session, column_name)
            for column_name in models.Session.__table__.columns.keys()
            if column_name not in {'timespan', 'parameters'}
        ],
        models.User.email.label('user_email'),
        # Correlated aggregate: all errors of the session as JSON objects.
        select([
            func.array_agg(
                func.json_build_object('timestamp', models.Error.timestamp,
                                       'message', models.Error.message))
        ]).where(models.Error.session_id == models.Session.id).label(
            'session_errors'),
        # Same shape for warnings.
        select([
            func.array_agg(
                func.json_build_object('timestamp', models.Warning.timestamp,
                                       'message', models.Warning.message))
        ]).where(models.Warning.session_id == models.Session.id).label(
            'session_warnings'),
        # Label names attached to the session, via the distinct pairs above.
        select([
            func.array_agg(
                func.json_build_object("name", models.Label.name))
        ]).select_from(
            labels_query.join(
                models.Label, models.Label.id == labels_query.c.label_id)).label(
            'session_labels'),
        # Session metadata folded into a single JSON object (key -> item).
        select([
            func.json_object_agg(models.SessionMetadata.key,
                                 models.SessionMetadata.metadata_item)
        ]).where(models.SessionMetadata.session_id == models.Session.id).label('session_metadata'),
    ]).select_from(
        # Outer join: sessions without a user still appear (user_email NULL).
        models.Session.__table__.outerjoin(
            models.User.__table__, models.Session.user_id == models.User.id))
def _add_relations(query, catalog_name, collection_name):
    """Join aggregated relation source-values onto a collection query.

    For every reference of the collection, left-joins a grouped subselect of
    the corresponding 'rel' table and adds its JSON aggregate as a
    ``ref:<reference>`` column.

    NOTE(review): relies on a module-level ``session`` (not a parameter) —
    confirm it is bound at import time.
    """
    gob_model = GOBModel()
    collection = gob_model.get_collection(catalog_name, collection_name)
    has_states = collection.get('has_states', False)
    src_table, _ = get_table_and_model(catalog_name, collection_name)
    for reference in collection['references']:
        relation_name = get_relation_name(gob_model, catalog_name, collection_name, reference)
        if not relation_name:
            # No materialized relation for this reference; skip it.
            continue
        rel_table, _ = get_table_and_model('rel', relation_name)
        # Stateful collections key on (src_id, src_volgnummer); others on src_id.
        select_attrs = [
            getattr(rel_table, 'src_id'),
            getattr(rel_table, 'src_volgnummer'),
        ] if has_states else [
            getattr(rel_table, 'src_id'),
        ]
        # Aggregate all live destination links per source row into JSON.
        subselect = session \
            .query(
                *select_attrs,
                func.json_agg(
                    func.json_build_object(
                        FIELD.SOURCE_VALUE, getattr(rel_table, FIELD.SOURCE_VALUE),
                        FIELD.REFERENCE_ID, getattr(rel_table, 'dst_id')
                    )
                ).label('source_values')
            ).filter(
                and_(
                    # Only rows that are not deleted and not (yet) expired.
                    getattr(rel_table, FIELD.DATE_DELETED).is_(None),
                    or_(
                        getattr(rel_table, FIELD.EXPIRATION_DATE).is_(None),
                        getattr(rel_table, FIELD.EXPIRATION_DATE) > func.now()
                    )
                )
            ).group_by(
                *select_attrs
            ).subquery()
        join_clause = [
            getattr(src_table, FIELD.ID) == getattr(subselect.c, 'src_id'),
            getattr(src_table, FIELD.SEQNR) == getattr(subselect.c, 'src_volgnummer')
        ] if has_states else [
            getattr(src_table, FIELD.ID) == getattr(subselect.c, 'src_id'),
        ]
        # Left join so source rows without relations are kept (column NULL).
        query = query.join(subselect, and_(*join_clause), isouter=True) \
            .add_columns(
                getattr(subselect.c, 'source_values').label(f"ref:{reference}")
            )
    return query
def venues():
    """Render the venues page: venues grouped by (city, state) as JSON blobs."""
    # NOTE(review): len(get_upcoming_or_past_shows(...)) is evaluated ONCE in
    # Python while the expression is built, so every venue in every group gets
    # the same 'num_upcoming_shows' value — it is not aggregated per venue.
    # TODO: num_shows should be aggregated per venue inside the query.
    query = db.session.query(func.json_build_object(
        'city', Venue.city,
        'state', Venue.state,
        'venues', func.json_agg(func.json_build_object(
            'id', Venue.id,
            'name', Venue.name,
            'num_upcoming_shows',
            len(get_upcoming_or_past_shows(Venue, 'venue_id')['upcoming_shows'])
        )
        ))).group_by(Venue.city, Venue.state).all()
    # Each result row is a one-element tuple; unwrap the JSON blob.
    data = [q[0] for q in query]
    return render_template('pages/venues.html', areas=data)
def update_note(stars_model, stars_, data, user_connected_model):
    """Record (or replace) the connected user's rating and return the new mean.

    BUG FIX: the original checked for an existing rating with
    ``user_connected_model.id`` but removed it using ``auth.user.get('id')``;
    when the two differed, the wrong (or no) previous note was removed.
    Both lookups now use the connected user's id consistently.

    :param stars_model: ORM row to persist the updated stars document to.
    :param stars_: stars document with 'note' (list) and 'users_who_rated' (dict).
    :param data: request payload; 'note' is a list whose last element is the rating.
    :param user_connected_model: the authenticated user.
    :return: JSON response {'note': mean of all notes}, HTTP 200.
    """
    user_key = str(user_connected_model.id)
    try:
        previous_note = stars_['users_who_rated'].get(user_key)
        if previous_note is not None:
            # Drop the user's previous rating before appending the new one.
            stars_['note'].remove(previous_note)
    except KeyError:
        # 'users_who_rated' (or 'note') missing — treat as first rating.
        pass
    finally:
        note = data['note'].pop()
        stars_["users_who_rated"][user_key] = note
        stars_['note'].append(note)
        # Serialize the dict into a SQL json_build_object expression.
        stars_["users_who_rated"] = func.json_build_object(*convert_dict_to_sql_json(stars_["users_who_rated"]))
    stars_model.update(stars_)
    return custom_response({"note": mean(stars_['note'])}, 200)
def get_agencies(self):
    """Return every agency as a dict, with its domains aggregated into JSON."""
    domain_json = func.json_build_object(
        'id', AgencyDomain.id,
        'domain', AgencyDomain.domain,
        'active', AgencyDomain.active)
    # One row per agency_id with all of its domains folded into a JSON array.
    domains_sq = (db.session.query(AgencyDomain.agency_id,
                                   func.json_agg(domain_json).label('domains'))
                  .group_by(AgencyDomain.agency_id)
                  .subquery())
    rows = (db.session.query(Agency.id,
                             Agency.name,
                             Agency.domain,
                             Agency.category,
                             Agency.state,
                             Agency.body_type,
                             Agency.whitelisted,
                             Agency.reports,
                             domains_sq.c.domains)
            .join(domains_sq, domains_sq.c.agency_id == Agency.id)
            .order_by(Agency.name)
            .all())
    return [row._asdict() for row in rows]
def update_my_global_function(user_connected_model, user_connected_schema):
    """Update the connected user's global conditions (refund policy, travel expenses)."""
    data, error = validate_data(condition_globals_schema, request)
    if error:
        return custom_response(data, 400)
    if data['refund_policy'] not in refund_allowed_type:
        return custom_response("refund not support", 400)
    if data['travel_expenses']:
        # Convert the nested dict into a SQL json_build_object expression.
        data['travel_expenses'] = func.json_build_object(
            *convert_dict_to_sql_json(data['travel_expenses']))
    globals_row = user_connected_model.condition_globals[0]
    globals_row.update(data)
    return custom_response("updated", 200)
async def get_notice_product_iter(self):
    """Yield notice/product rows per subscribed user, ordered by user then product.

    Roughly equivalent SQL:

    SELECT
        sub.user_id, p.id as product_id, p.name as product_name, p.url as product_url,
        s.label as shop_label, p.parameters as product_parameters,
        json_agg(json_build_object('data', ns.data, 'parameters', ps.parameters, 'discount', ps.discount))
    FROM sub_user_stock_ix as subi
        INNER JOIN notice_stock as ns ON ns.stock_id = subi.stock_id
        INNER JOIN sub_user as sub ON sub.id = subi.sub_id
        INNER JOIN product_stock as ps ON ps.id = subi.stock_id
        INNER JOIN product as p ON p.id = ps.product_id
        INNER JOIN shop as s ON s.id = p.shop_id
    GROUP BY sub.user_id, p.id, s.id
    ORDER BY sub.user_id, p.id
    """
    async with db.engine.acquire() as db_connection:
        # Cheap existence probe: skip the heavy join when there are no notices.
        select_exists = sa.select((sa.exists(sa.select((notice_stock.c.stock_id, ))),))
        res = await db_connection.execute(select_exists)
        exists = await res.scalar()
        if not exists:
            return
        query = sa.select((
            sub_user.c.user_id,
            product.c.id.label('product_id'),
            product.c.name.label('product_name'),
            product.c.reference.label('product_reference'),
            product.c.url.label('product_url'),
            product.c.parameters.label('product_parameters'),
            # All notices for the product folded into one JSON array per row.
            func.json_agg(func.json_build_object(
                'data', notice_stock.c.data,
                'parameters', product_stock.c.parameters,
                'discount', product_stock.c.discount,
            )).label('notice_data'),
            shop.c.label.label('shop_label')
        ))\
            .select_from(
                sub_user_stock_ix
                .join(notice_stock, notice_stock.c.stock_id == sub_user_stock_ix.c.stock_id)
                .join(sub_user, sub_user.c.id == sub_user_stock_ix.c.sub_id)
                .join(product_stock, product_stock.c.id == sub_user_stock_ix.c.stock_id)
                .join(product, product.c.id == product_stock.c.product_id)
                .join(shop, shop.c.id == product.c.shop_id)
            )\
            .group_by(sub_user.c.user_id, product.c.id, shop.c.id)\
            .order_by(sub_user.c.user_id, product.c.id)
        # Stream rows to the caller instead of materializing the result set.
        async for row in db_connection.execute(query):
            yield row
def get_suppliers_with_expiring_documents(self, days):
    """Return suppliers whose insurance documents expire exactly ``days`` days
    from today (Sydney time), with the expiring documents and the supplier's
    contact email addresses aggregated as JSON.

    NOTE(review): this method appears twice in this file with identical
    bodies; the later definition shadows this one — deduplicate.
    NOTE(review): despite the comment below, the filter matches an exact
    expiry date (today + days), not a range.
    """
    today = datetime.now(pytz.timezone('Australia/Sydney'))
    # Find out which of the supplier's documents have expired or are expiring soon
    liability = (select([Supplier.code,
                         Supplier.name,
                         literal('liability').label('type'),
                         Supplier.data['documents']['liability']['expiry'].astext.label('expiry')])
                 .where(and_(Supplier.data['documents']['liability']['expiry'].isnot(None),
                             func.to_date(Supplier.data['documents']['liability']['expiry'].astext,
                                          'YYYY-MM-DD') == (today.date() + timedelta(days=days)))))
    workers = (select([Supplier.code,
                       Supplier.name,
                       literal('workers').label('type'),
                       Supplier.data['documents']['workers']['expiry'].astext.label('expiry')])
               .where(and_(Supplier.data['documents']['workers']['expiry'].isnot(None),
                           func.to_date(Supplier.data['documents']['workers']['expiry'].astext,
                                        'YYYY-MM-DD') == (today.date() + timedelta(days=days)))))
    expiry_dates = union(liability, workers).alias('expiry_dates')
    # Aggregate the document details so they can be returned with the results
    documents = (db.session.query(expiry_dates.columns.code,
                                  expiry_dates.columns.name,
                                  func.json_agg(
                                      func.json_build_object(
                                          'type', expiry_dates.columns.type,
                                          'expiry', expiry_dates.columns.expiry)).label('documents'))
                 .group_by(expiry_dates.columns.code, expiry_dates.columns.name)
                 .subquery('expired_documents'))
    # Find email addresses associated with the supplier
    email_addresses = self.get_supplier_contacts_union()
    # Aggregate the email addresses so they can be returned with the results
    aggregated_emails = (db.session.query(email_addresses.columns.code,
                                          func.json_agg(
                                              email_addresses.columns.email_address
                                          ).label('email_addresses'))
                         .group_by(email_addresses.columns.code)
                         .subquery())
    # Combine the list of email addresses and documents
    results = (db.session.query(documents.columns.code,
                                documents.columns.name,
                                documents.columns.documents,
                                aggregated_emails.columns.email_addresses)
               .join(aggregated_emails, documents.columns.code == aggregated_emails.columns.code)
               .order_by(documents.columns.code)
               .all())
    return [r._asdict() for r in results]
def get_suppliers_with_expiring_documents(self, days):
    """Return suppliers whose insurance documents expire exactly ``days`` days
    from today (Sydney time), with the expiring documents and the supplier's
    contact email addresses aggregated as JSON.

    NOTE(review): this method is a verbatim duplicate of an earlier
    definition in this file; this one wins at class-creation time —
    deduplicate.
    NOTE(review): despite the comment below, the filter matches an exact
    expiry date (today + days), not a range.
    """
    today = datetime.now(pytz.timezone('Australia/Sydney'))
    # Find out which of the supplier's documents have expired or are expiring soon
    liability = (select([Supplier.code,
                         Supplier.name,
                         literal('liability').label('type'),
                         Supplier.data['documents']['liability']['expiry'].astext.label('expiry')])
                 .where(and_(Supplier.data['documents']['liability']['expiry'].isnot(None),
                             func.to_date(Supplier.data['documents']['liability']['expiry'].astext,
                                          'YYYY-MM-DD') == (today.date() + timedelta(days=days)))))
    workers = (select([Supplier.code,
                       Supplier.name,
                       literal('workers').label('type'),
                       Supplier.data['documents']['workers']['expiry'].astext.label('expiry')])
               .where(and_(Supplier.data['documents']['workers']['expiry'].isnot(None),
                           func.to_date(Supplier.data['documents']['workers']['expiry'].astext,
                                        'YYYY-MM-DD') == (today.date() + timedelta(days=days)))))
    expiry_dates = union(liability, workers).alias('expiry_dates')
    # Aggregate the document details so they can be returned with the results
    documents = (db.session.query(expiry_dates.columns.code,
                                  expiry_dates.columns.name,
                                  func.json_agg(
                                      func.json_build_object(
                                          'type', expiry_dates.columns.type,
                                          'expiry', expiry_dates.columns.expiry)).label('documents'))
                 .group_by(expiry_dates.columns.code, expiry_dates.columns.name)
                 .subquery('expired_documents'))
    # Find email addresses associated with the supplier
    email_addresses = self.get_supplier_contacts_union()
    # Aggregate the email addresses so they can be returned with the results
    aggregated_emails = (db.session.query(email_addresses.columns.code,
                                          func.json_agg(
                                              email_addresses.columns.email_address
                                          ).label('email_addresses'))
                         .group_by(email_addresses.columns.code)
                         .subquery())
    # Combine the list of email addresses and documents
    results = (db.session.query(documents.columns.code,
                                documents.columns.name,
                                documents.columns.documents,
                                aggregated_emails.columns.email_addresses)
               .join(aggregated_emails, documents.columns.code == aggregated_emails.columns.code)
               .order_by(documents.columns.code)
               .all())
    return [r._asdict() for r in results]
def get_agency(self, agency_id):
    """Return a single agency as a dict, with its domains aggregated into JSON."""
    domain_json = func.json_build_object(
        'id', AgencyDomain.id,
        'domain', AgencyDomain.domain,
        'active', AgencyDomain.active)
    # One row per agency_id with all of its domains folded into a JSON array.
    domains_sq = (db.session.query(AgencyDomain.agency_id,
                                   func.json_agg(domain_json).label('domains'))
                  .group_by(AgencyDomain.agency_id)
                  .subquery())
    row = (db.session.query(Agency.id,
                            Agency.name,
                            Agency.domain,
                            Agency.category,
                            Agency.state,
                            Agency.body_type,
                            Agency.whitelisted,
                            Agency.reports,
                            Agency.must_join_team,
                            domains_sq.c.domains)
           .join(domains_sq, domains_sq.c.agency_id == Agency.id)
           .filter(Agency.id == agency_id)
           .one_or_none())
    return row._asdict()
def convert_dict_to_sql_json(data_dict=None, data_list=None):
    """Flatten a mapping into the alternating key/value list expected by
    PostgreSQL's json_build_object(); nested dicts are converted recursively
    into nested json_build_object() expressions.

    Fixes vs. the original:
    - the two @param descriptions were swapped;
    - ``reduce`` without an initializer raised TypeError on an empty dict;
    - calling with neither argument crashed on ``enumerate(None)``.

    :param data_dict: dict to transform (takes precedence when given).
    :param data_list: already-flattened alternating key/value list to transform.
    :return: alternating [key1, value1, key2, value2, ...] list.
    """
    if data_dict is not None:
        # () initializer makes the fold safe for empty dicts.
        data_list = list(reduce(lambda x, y: x + y, data_dict.items(), ()))
    if data_list is None:
        return []
    for index, value in enumerate(data_list):
        if isinstance(value, dict):
            # Nested dict -> nested json_build_object expression.
            data_list[index] = func.json_build_object(
                *convert_dict_to_sql_json(
                    None, list(reduce(lambda x, y: x + y, value.items(), ()))))
    return data_list
def _inject_omics_data_summary(self, db: Session, query: Query) -> Query:
    """Join per-pipeline omics counts onto a study query and expose them as
    mapped expressions (Study.omics_counts, Study.omics_processing_counts).

    :param db: database session used to build the count subqueries.
    :param query: study query to extend.
    :return: the query with the count expressions populated.
    """
    aggs = []
    for omics_class in workflow_search_classes:
        pipeline_model = omics_class().table.model
        table_name = pipeline_model.__tablename__  # type: ignore
        # Only conditions relevant to this pipeline's tables apply.
        filter_conditions = [
            c for c in self.conditions
            if c.table.value in {"omics_processing", table_name, "biosample"}
        ]
        query_schema = omics_class(conditions=filter_conditions)
        omics_subquery = self._count_omics_data_query(
            db, query_schema).subquery()
        study_id = getattr(omics_subquery.c, f"{table_name}_study_id")
        # Left join so studies without data for this pipeline are kept.
        query = query.join(
            omics_subquery,
            self.table.model.id == study_id,  # type: ignore
            isouter=True,
        )
        aggs.append(
            func.json_build_object(
                "type", table_name,
                "count", getattr(omics_subquery.c, f"{table_name}_count")))
    # Overall omics-processing summary, filtered the same way.
    op_filter_conditions = [
        c for c in self.conditions if c.table.value in {"omics_processing", "biosample"}
    ]
    op_summary_subquery = self._count_omics_processing_summary(
        db, op_filter_conditions).subquery()
    query = query.join(
        op_summary_subquery,
        op_summary_subquery.c.omics_processing_study_id == models.Study.id,
        isouter=True,
    )
    aggregation = func.json_build_array(*aggs)
    # populate_existing + with_expression attach the computed JSON columns
    # to the mapped Study attributes declared with query_expression().
    return query.populate_existing().options(
        with_expression(models.Study.omics_counts, aggregation),
        with_expression(
            models.Study.omics_processing_counts,
            op_summary_subquery.c.omics_processing_summary,
        ),
    )
def get_facts_for_document(session, gkg_id=None):
    """Return all facts for one document (by GKG id) as plain dicts, with the
    fact's locations aggregated into a JSON array per fact.

    :param session: SQLAlchemy session.
    :param gkg_id: GKG identifier of the analysed document.
    """
    # select the facts that match the filters
    facts = (session.query(
        Fact.id.label('fact_id'),
        Fact.excerpt_start.label('excerpt_start'),
        Fact.excerpt_end.label('excerpt_end'),
        Fact.unit.label('unit'),
        Fact.term.label('term'),
        Fact.specific_reported_figure.label('specific_reported_figure'),
        Fact.vague_reported_figure.label('vague_reported_figure'),
        Fact.iso3.label('iso3'),
        Fact.tag_locations.label('tags'),
        # One JSON object per associated location, collected per fact.
        func.array_agg(
            func.json_build_object(
                'location_name', Location.location_name,
                'location_type', Location.location_type,
                'iso3', Country.iso3,
                'country_name', Country.preferred_term,
                'latlong', Location.latlong)).label('locations')).join(
        Fact, Analysis.facts).join(Location, Fact.locations).join(
        Country, Location.country).group_by(Fact).filter(
        Analysis.gkg_id == gkg_id))
    # Execute and convert each result row into a plain dict.
    return [dict(r.items()) for r in session.execute(facts)]
def update_one_option(option_id, user_connected_model, user_connected_schema):
    """Update a single option belonging to the connected user."""
    data, error = validate_data(option_schema, request)
    if error:
        return custom_response(data, 400)
    option = user_connected_model.options.filter_by(id=option_id).first()
    if not option:
        return custom_response("option not found", 404)
    # Merge the validated payload over the option's current serialized state.
    merged = option_schema.dump(option)
    if data['special_dates']:
        data['special_dates'] = func.json_build_object(
            *convert_dict_to_sql_json(data['special_dates']))
    merged.update(data)
    option.update(merged)
    return custom_response(option_schema.dump(option), 200)
def get_approved_case_studies_by_supplier_code(self, supplier_code, domain_id):
    """Return the supplier's approved case studies for one domain, grouped by
    category name; empty dict when there are none."""
    # Approved case studies of this supplier whose service matches the domain.
    approved = (db.session.query(
        CaseStudy.id.label('cs_id'),
        CaseStudy.data.label('case_study_data'),
        Domain.name.label('category_name'))
        .join(Domain, Domain.name == CaseStudy.data['service'].astext)
        .filter(CaseStudy.supplier_code == supplier_code,
                CaseStudy.status == 'approved',
                Domain.id == domain_id)
        .subquery())
    # Fold the case studies into a JSON array per category.
    aggregated = (db.session.query(
        approved.c.category_name,
        func.json_agg(
            func.json_build_object(
                'id', approved.c.cs_id,
                'data', approved.c.case_study_data)).label('cs_data'))
        .group_by(approved.c.category_name))
    row = aggregated.one_or_none()
    return row._asdict() if row else {}
def get_approved_evidence(self, evidence_id):
    """Return one evidence submission joined with its domain criteria,
    aggregated as {'category', 'maxDailyRate', 'evidence'}; empty dict when
    the combination yields no row.
    """
    # Category name and max daily rate for this evidence's domain.
    category_name_max_daily_rate = (db.session.query(
        Domain.name.label('category'),
        Evidence.data['maxDailyRate'].label('maxDailyRate')).join(
        Evidence, Evidence.domain_id == Domain.id).filter(
        Evidence.id == evidence_id).subquery())
    # Explode data['criteria'] (JSON array) into one row per criteria id.
    evidence_domain_criteria = (db.session.query(
        Evidence.id.label('evidence_id'),
        func.json_array_elements_text(
            Evidence.data['criteria']).label('domain_criteria_id')).filter(
        Evidence.id == evidence_id).subquery())
    # Join each criteria id to its name and the matching evidence payload.
    subquery = (db.session.query(
        evidence_domain_criteria.c.domain_criteria_id,
        DomainCriteria.name.label('dc_name'),
        Evidence.data['evidence'][
            evidence_domain_criteria.c.domain_criteria_id].label(
            'evidence_data')).join(
        DomainCriteria,
        DomainCriteria.id == evidence_domain_criteria.c.
        domain_criteria_id.cast(Integer)).filter(
        Evidence.id == evidence_id).subquery())
    # Aggregate criteria + payloads into a JSON array per category.
    evidence_data = (db.session.query(
        category_name_max_daily_rate.c.category,
        func.json_agg(
            func.json_build_object(
                'dc_id', subquery.c.domain_criteria_id,
                'domain_criteria_name', subquery.c.dc_name,
                'evidence_data', subquery.c.evidence_data)).label('evidence')).group_by(
        category_name_max_daily_rate.c.category).subquery())
    result = (db.session.query(category_name_max_daily_rate.c.category,
                               category_name_max_daily_rate.c.maxDailyRate,
                               evidence_data.c.evidence))
    results = result.one_or_none()
    return results._asdict() if results else {}
def artists():
    """Render the artists page with every artist's id and name."""
    rows = db.session.query(func.json_build_object(
        'id', Artist.id,
        'name', Artist.name)).all()
    # Each result row is a one-element tuple; unwrap the JSON blob.
    data = [blob for (blob,) in rows]
    return render_template('pages/artists.html', artists=data)
def get_suppliers(self):
    """Return all suppliers with their category assessment statuses aggregated."""
    category_json = func.json_build_object(
        'category', Domain.name,
        'status', SupplierDomain.status,
        'price_status', SupplierDomain.price_status)
    # One row per supplier with all category statuses folded into a JSON array.
    categories_sq = (db.session.query(SupplierDomain.supplier_id,
                                      func.json_agg(category_json).label('categories'))
                     .join(Domain)
                     .group_by(SupplierDomain.supplier_id)
                     .subquery())
    rows = (db.session.query(Supplier.code,
                             Supplier.name,
                             Supplier.abn,
                             Supplier.status,
                             Supplier.creation_time,
                             Supplier.data['seller_type']['sme'].astext.label('sme'),
                             categories_sq.columns.categories)
            .join(categories_sq, Supplier.id == categories_sq.columns.supplier_id)
            .order_by(Supplier.code)
            .all())
    return [row._asdict() for row in rows]
def create_new_service(user_connected_model, user_connected_schema):
    """Create a new service for the connected artist.

    Validates the payload, rejects duplicates (same title with same city or
    same events), requires at least one gallery image, fills defaults, sends
    a "first service" email on the artist's first service, persists the
    service and seeds its star rating.
    """
    data, error = validate_data(service_schema, request, return_dict=False)
    if error:
        return custom_response(data, 400)
    all_user_services = user_connected_model.services.all()
    # Duplicate check.  Python precedence: (title AND city) OR (title AND events).
    for service in all_user_services:
        if service.title == data['title'] and service.reference_city == data['reference_city'] \
                or service.title == data['title'] and service.events == data['events']:
            return custom_response("same title and reference_city", 400)
    new_galleries = check_galleries_files(request, user_connected_model)
    data['galleries'] = list(set(data.get("galleries", []) + new_galleries))
    if len(data['galleries']) == 0:
        return custom_response("i need galleries", 400)
    if not data.get("travel_expenses"):
        # Default to the artist's global travel-expenses settings.
        data["travel_expenses"] = user_connected_model.condition_globals[
            0].travel_expenses
    else:
        # Convert the dict into a SQL json_build_object expression.
        data['travel_expenses'] = func.json_build_object(
            *convert_dict_to_sql_json(data['travel_expenses']))
    data['user_id'] = user_connected_schema["id"]
    data['materials_id'] = create_new_materials_for_new_services()
    # First-ever service for this artist: send the onboarding email.
    if len(all_user_services) == 0:
        first_service('FirstService.html', user_connected_schema["email"],
                      user_connected_schema["name"], data["title"])
    new_service = Services(data)
    new_service.save()
    # Seed an empty star-rating document for the new service.
    generate_basic_stars(service_id=new_service.id)
    return custom_response(
        return_services(service_schema.dump(new_service), new_service), 200)
def find_relations(query):
    """Attach related rows to a Payments query as labelled scalar subqueries.

    Each subquery correlates against a ``db.Payments`` column (some ids live
    inside the JSON ``data`` column) and resolves the referenced row to JSON.

    :param query: Payments query to extend.
    :return: the query with the extra columns added.
    """
    branch = g.tran.query(func.row_to_json(text('branches.*'))).select_from(db.Branches) \
        .filter_by(_deleted='infinity', _id=db.Payments.branch_id).as_scalar().label('branch')
    document = g.tran.query(func.row_to_json(text('document.*'))).select_from(db.Document) \
        .filter_by(_deleted='infinity', _id=db.Payments.document_id).as_scalar().label('document')
    currency = g.tran.query(func.row_to_json(text('currencies.*'))).select_from(db.Currencies) \
        .filter_by(_deleted='infinity', _id=db.Payments.currency_id).as_scalar().label('currency')
    # Enum lookups: resolve each payment enum key to its full enum row.
    payment_direction = g.tran.query(func.row_to_json(text('enums.*'))).select_from(db.Enums) \
        .filter_by(_deleted='infinity', name='payment_direction') \
        .filter(db.Enums.data['key'].cast(TEXT) == cast(db.Payments.payment_direction, TEXT)) \
        .as_scalar().label('payment_direction_value')
    payment_type = g.tran.query(func.row_to_json(text('enums.*'))).select_from(db.Enums) \
        .filter_by(_deleted='infinity', name='payment_type') \
        .filter(db.Enums.data['key'].cast(TEXT) == cast(db.Payments.payment_type, TEXT)) \
        .as_scalar().label('payment_type_value')
    payment_status = g.tran.query(func.row_to_json(text('enums.*'))).select_from(db.Enums) \
        .filter_by(_deleted='infinity', name='payment_status') \
        .filter(db.Enums.data['key'].cast(TEXT) == cast(db.Payments.payment_status, TEXT)) \
        .as_scalar().label('payment_status_value')
    # Nested subquery: the user row embedded inside the employee JSON below.
    user = g.tran.query(func.json_build_object(
        "id", db.User.id, "username", db.User.username, "email", db.User.email,
        "rec_date", db.User.rec_date, "data", db.User.data,
        "role", db.User.role)).select_from(db.User).filter_by(id=db.UserCompany.user_id) \
        .as_scalar().label('user')
    # employee_id is stored inside the JSON data column; cast it to INTEGER.
    employee = g.tran.query(func.json_build_object('_id', db.UserCompany._id, '_rev', db.UserCompany._rev,
                                                   '_created', db.UserCompany._created,
                                                   'user_id', db.UserCompany.user_id,
                                                   'company_id', db.UserCompany.company_id,
                                                   'entry_user_id', db.UserCompany.entry_user_id,
                                                   'branches_id', db.UserCompany.branches_id,
                                                   'roles_id', db.UserCompany.roles_id,
                                                   'access', db.UserCompany.access,
                                                   'user', user)).select_from(db.UserCompany) \
        .filter_by(_deleted='infinity', company_id=db.Payments.company_id,
                   user_id=db.Payments.data['employee_id']
                   .cast(INTEGER))\
        .as_scalar().label('employee')
    contractor = g.tran.query(func.row_to_json(text('contractors.*'))).select_from(db.Contractors) \
        .filter_by(_deleted='infinity', _id=db.Payments.data['contractor_id'].cast(TEXT)) \
        .as_scalar().label('contractor')
    # Second branch lookup: branch id stored inside the JSON data column.
    branch2 = g.tran.query(func.row_to_json(text('branches.*'))).select_from(db.Branches) \
        .filter_by(_deleted='infinity', _id=db.Payments.data['branch_id'].cast(TEXT)).as_scalar().label('branch2')
    entry_user = g.tran.query(func.json_build_object(
        "id", db.User.id, "username", db.User.username, "email", db.User.email,
        "rec_date", db.User.rec_date, "data", db.User.data,
        "role", db.User.role)).select_from(db.User).filter_by(id=db.Payments.entry_user_id)\
        .as_scalar().label('entry_user')
    query = query.add_columns(branch, document, currency, payment_direction, payment_type, payment_status,
                              contractor, branch2, entry_user, employee)
    return query
def venues():
    """Render the venues page, grouping venues by (city, state)."""
    venue_json = func.json_build_object('id', Venue.id, 'name', Venue.name)
    # Aggregate every venue of a (city, state) pair into one array column.
    data = Venue.query.with_entities(
        Venue.city,
        Venue.state,
        postgresql.array_agg(venue_json).label('venues')) \
        .group_by(Venue.city, Venue.state).all()
    return render_template('pages/venues.html', areas=data)
def listing(self, text_query=None, type=None, start_date=None, end_date=None,
            contributor_person_ids=None, contributor_group_ids=None,
            affiliation_group_ids=None, related_work_ids=None,
            offset=0, limit=100, order_by=None, principals=None):
    """List works matching the given filters, with contributors, relations
    and affiliations aggregated per work.

    Exactly one of the id filters is applied -- the first non-empty of
    contributor_person_ids, contributor_group_ids, affiliation_group_ids,
    related_work_ids wins.  *principals* feeds self.acl_filters() to
    restrict visibility.  Results are paged with limit/offset; *total* is
    counted before paging.

    Returns {'total': int, 'hits': [...], 'limit': ..., 'offset': ...}.
    NOTE: the parameters `type` and the loop variable `id` below shadow
    builtins (kept for interface compatibility).
    """
    # --- pre-selection CTE: work ids matching the supplied id filter ----
    selected_work_ids = None
    if contributor_person_ids:
        query = self.session.query(Contributor.work_id.label('id'))
        query = query.filter(sql.or_(*[Contributor.person_id == pid
                                       for pid in contributor_person_ids]))
        query = query.group_by(Contributor.work_id)
        selected_work_ids = query.cte('selected_work_ids')
    elif contributor_group_ids:
        query = self.session.query(Contributor.work_id.label('id'))
        query = query.filter(sql.or_(*[Contributor.group_id == gid
                                       for gid in contributor_group_ids]))
        query = query.group_by(Contributor.work_id)
        selected_work_ids = query.cte('selected_work_ids')
    elif affiliation_group_ids:
        query = self.session.query(Affiliation.work_id.label('id'))
        query = query.filter(sql.or_(*[Affiliation.group_id == gid
                                       for gid in affiliation_group_ids]))
        query = query.group_by(Affiliation.work_id)
        selected_work_ids = query.cte('selected_work_ids')
    elif related_work_ids:
        query = self.session.query(Relation.work_id.label('id'))
        query = query.filter(sql.or_(*[Relation.target_id == wid
                                       for wid in related_work_ids]))
        query = query.group_by(Relation.work_id)
        selected_work_ids = query.cte('selected_work_ids')
    work_query = self.session.query(Work.id)
    if selected_work_ids is not None:
        work_query = work_query.join(
            selected_work_ids, selected_work_ids.c.id == Work.id)
    # --- ACL: restrict to works the caller may see ----------------------
    acl_filters = self.acl_filters(principals)
    if acl_filters:
        # Filters are partitioned by which table their left column lives in.
        group_filters = [f for f in acl_filters
                         if f.left.table.name == 'affiliations']
        person_filters = [f for f in acl_filters
                          if f.left.table.name == 'contributors']
        if group_filters:
            query = self.session.query(Affiliation.work_id.label('id'))
            query = query.filter(sql.or_(*group_filters))
            query = query.group_by(Affiliation.work_id)
            allowed_work_ids = query.cte('allowed_work_ids')
            allowed_group_query = query
        if person_filters:
            query = self.session.query(Contributor.work_id.label('id'))
            query = query.filter(sql.or_(*person_filters))
            query = query.group_by(Contributor.work_id)
            allowed_work_ids = query.cte('allowed_work_ids')
            allowed_person_query = query
        if group_filters and person_filters:
            # Both kinds present: a work is allowed if either grants access.
            query = allowed_group_query.union(
                allowed_person_query).group_by('id')
            allowed_work_ids = query.cte('allowed_work_ids')
        # NOTE(review): if acl_filters is non-empty but contains neither
        # affiliations nor contributors columns, allowed_work_ids is unbound
        # here (NameError) -- presumably acl_filters only ever yields those
        # two tables; confirm against self.acl_filters().
        work_query = work_query.join(
            allowed_work_ids, allowed_work_ids.c.id == Work.id)
    # --- scalar filters -------------------------------------------------
    if start_date or end_date:
        duration = DateInterval([start_date, end_date])
        # Postgres range-overlap operator against the work's date interval.
        work_query = work_query.filter(Work.during.op('&&')(duration))
    if text_query:
        # Substring match on the title ('%%' renders a literal '%').
        work_query = work_query.filter(
            Work.title.ilike('%%%s%%' % text_query))
    if type:
        work_query = work_query.filter(Work.type == type)
    # Total is taken before ordering/paging.
    total = work_query.count()
    work_query = work_query.order_by(order_by or Work.issued.desc())
    work_query = work_query.limit(limit).offset(offset)
    filtered_work_ids = work_query.cte('filtered_work_ids')
    # Page of works, narrowed to the columns the listing needs.
    listed_works = self.session.query(
        Work.id.label('id'), Work.type.label('type'),
        Work.issued.label('issued'), Work.title).join(
        filtered_work_ids, filtered_work_ids.c.id == Work.id).cte(
        'listed_works')
    Target = aliased(Work)
    # One row per listed work with JSON-aggregated contributors/relations
    # and distinct 'id:name' affiliation strings.
    full_listing = self.session.query(
        listed_works,
        func.json_agg(
            func.json_build_object('id', Contributor.id,
                                   'position', Contributor.position,
                                   'name', Person.name,
                                   'person_id', Person.id,
                                   'initials', Person.initials,
                                   'prefix', Person.family_name_prefix,
                                   'given_name', Person.given_name,
                                   'family_name', Person.family_name,
                                   'description', Contributor.description,
                                   'group_id', Contributor.group_id,
                                   'role', Contributor.role)
        ).label('contributors'),
        func.json_agg(
            func.json_build_object('id', Relation.id,
                                   'relation_type', Relation.type,
                                   'type', Target.type,
                                   'location', Relation.location,
                                   'starting', Relation.starting,
                                   'ending', Relation.ending,
                                   'volume', Relation.volume,
                                   'issue', Relation.issue,
                                   'number', Relation.number,
                                   'title', Target.title)
        ).label('relations'),
        func.array_agg(
            sql.distinct(
                func.concat(Group.id, ':', Group.name))).label('affiliations')
    )
    full_listing = full_listing.outerjoin(
        Contributor, listed_works.c.id == Contributor.work_id).outerjoin(
        Person, Person.id == Contributor.person_id)
    full_listing = full_listing.outerjoin(
        Affiliation, Contributor.id == Affiliation.contributor_id).outerjoin(
        Group, Group.id == Affiliation.group_id)
    full_listing = full_listing.outerjoin(
        Relation, listed_works.c.id == Relation.work_id).outerjoin(
        Target, Target.id == Relation.target_id)
    full_listing = full_listing.group_by(listed_works).order_by(
        listed_works.c.issued.desc())
    # --- post-process rows into plain dicts -----------------------------
    hits = []
    contributor_role_ids = set(contributor_person_ids or [])
    for hit in full_listing.all():
        # affiliations come back as 'id:name' strings; split once here.
        aff_labels = dict([tuple(a.split(':', 1)) for a in hit.affiliations])
        contributors = []
        roles = set()
        # filter out contributor rows with a null id.
        # this happens with course relations
        contributor_rows = [c for c in hit.contributors if c.get('id')]
        contributor_rows.sort(key=itemgetter('position'))
        for contributor in contributor_rows:
            # Collect the roles this caller's persons play on the work.
            if contributor['person_id'] in contributor_role_ids:
                roles.add(contributor['role'])
            if (contributors and
                    contributors[-1]['position'] == contributor['position']):
                # same contributor as previous one (but another affiliation);
                # it's hard to remove this with a distinct clause in the
                # json agg, so we de-duplicate by position here
                continue
            cg_id = contributor['group_id']
            if cg_id and str(cg_id) in aff_labels:
                # a group contributor is always added as an affiliation
                contributor['group_name'] = aff_labels[str(cg_id)]
            contributors.append(contributor)
        affiliations = []
        for affiliation in hit.affiliations:
            id, name = affiliation.split(':', 1)
            affiliations.append(dict(id=id, name=name))
        hits.append({'id': hit.id,
                     'title': hit.title,
                     'type': hit.type,
                     'roles': list(roles),
                     'issued': hit.issued.strftime('%Y-%m-%d'),
                     'relations': hit.relations,
                     'affiliations': affiliations,
                     'contributors': contributors})
    return {'total': total, 'hits': hits, 'limit': limit, 'offset': offset}
def listing(bag):
    """Return the documents of the session's company that the current user
    may see, honouring the filter/ordering/paging options in *bag*.

    bag keys (all optional):
        own          -- True: only documents whose data.user_id is the
                        current user; otherwise drafts are excluded.
        filter       -- dict of column filters; a nested "data" dict is
                        matched via JSONB containment (and removed from
                        the dict -- note *bag* is mutated).
        order_by     -- explicit ordering expressions (default: newest
                        first by _created).
        limit/offset -- paging; "count" is computed before paging.
        with_related -- True: attach enum display records, approval roles
                        and the creating user as extra columns.

    Returns {'docs': [...], 'count': total_before_paging}.
    """
    query = g.tran.query(db.Document._id).filter_by(_deleted='infinity',
                                                    company_id=g.session['company_id'])
    # Expose every mapped column of db.Document as an explicit result column.
    doc_vars = vars(db.Document)
    for var in doc_vars:
        if isinstance(doc_vars[var], InstrumentedAttribute):
            query = query.add_column(doc_vars[var])
    # Visibility: the user is the document's owner, its executor, or holds
    # one of the roles in the document's approval chain.
    # NOTE(review): with no roles the third disjunct is None, which
    # SQLAlchemy renders as SQL NULL inside OR (effectively false) --
    # presumably intentional; confirm.
    query = query.filter(
        or_(db.Document.data.contains(type_coerce({"user_id": g.user.id}, JSONB)),
            db.Document.data.contains(type_coerce({"executor_id": g.user.id}, JSONB)),
            type_coerce(db.Document.approval['roles_id'], JSONB).has_any(array(g.user.roles_id)) if len(
                g.user.roles_id) > 0 else None))
    if "own" in bag and bag["own"] is True:
        query = query.filter(db.Document.data['user_id'] == g.user.id)
    else:
        # Non-owners never see drafts.
        query = query.filter(db.Document.document_status != 'draft')
    if "filter" in bag:
        if "data" in bag["filter"] and isinstance(bag["filter"]["data"], dict):
            # JSONB containment match on the free-form data column.
            query = query.filter(db.Document.data.contains(type_coerce(bag["filter"]["data"], JSONB)))
            del bag["filter"]["data"]
        query = query.filter_by(**bag["filter"])
    if "order_by" in bag:
        query = query.order_by(*bag["order_by"])
    else:
        query = query.order_by(db.Document._created.desc())
    # Total count is taken before limit/offset are applied.
    count = query.count()
    if "limit" in bag:
        query = query.limit(bag["limit"])
    if "offset" in bag:
        query = query.offset(bag["offset"])
    if 'with_related' in bag and bag['with_related'] is True:
        # Scalar subqueries resolving the enum display records for the two
        # enum-valued columns.
        document_status_value = g.tran.query(
            func.row_to_json(text('enums.*'))).select_from(db.Enums) \
            .filter_by(_deleted='infinity', name='document_status') \
            .filter(db.Enums.data['key'].cast(TEXT) == cast(db.Document.document_status, TEXT)) \
            .as_scalar().label('document_status_value')
        document_type_value = g.tran.query(
            func.row_to_json(text('enums.*'))).select_from(db.Enums) \
            .filter_by(_deleted='infinity', name='document_type') \
            .filter(db.Enums.data['key'].cast(TEXT) == cast(db.Document.document_type, TEXT)) \
            .as_scalar().label('document_type_value')
        # All role records referenced by the document's approval chain.
        roles = g.tran.query(func.jsonb_agg(func.row_to_json(text('roles.*')))).select_from(db.Roles) \
            .filter_by(_deleted='infinity')\
            .filter(type_coerce(db.Document.approval['roles_id'], JSONB).has_any(array([db.Roles._id])))\
            .as_scalar().label('roles')
        # The user who created the document, rendered as a JSON object.
        entry_user = g.tran.query(func.json_build_object(
            "id", db.User.id, "username", db.User.username,
            "email", db.User.email, "rec_date", db.User.rec_date,
            "data", db.User.data, "role", db.User.role)).select_from(db.User).filter_by(
            id=db.Document.entry_user_id) \
            .as_scalar().label('entry_user')
        query = query.add_columns(document_status_value, document_type_value, entry_user, roles)
    return {'docs': orm_to_json(query.all()), 'count': count}
def listing(bag):
    """Generic listing endpoint: return rows of the table named by
    bag["type"], applying per-table visibility rules, optional filters,
    ordering, paging and (optionally) related records.

    Raises CbsException(TABLE_NOT_FOUND) if bag["type"] does not name a
    table deriving from both db.Base and db.CouchSync.
    Returns {"docs": [...], "count": total_before_paging,
             "current_date": datetime}.
    """
    table_name = bag["type"]
    table = getattr(db, table_name) if hasattr(db, table_name) else None
    if table is None or not issubclass(table, (db.Base, db.CouchSync)):
        raise CbsException(TABLE_NOT_FOUND)
    query = g.tran.query(table._id).filter_by(_deleted='infinity')
    # Expose every mapped column of the table as an explicit result column.
    doc_vars = vars(table)
    for var in doc_vars:
        if isinstance(doc_vars[var], InstrumentedAttribute):
            query = query.add_column(doc_vars[var])
    if table == db.Menus:
        # Menus are resolved per user role and returned immediately,
        # bypassing the generic filter/paging logic below.
        menus = []
        if hasattr(g, 'user'):
            if g.user.role == 10:
                # Admin: everything (or only active admin menus without 'all').
                if 'all' in bag:
                    menus = g.tran.query(
                        db.Menus).filter_by(_deleted='infinity').all()
                else:
                    menus = g.tran.query(db.Menus).filter_by(_deleted='infinity') \
                        .filter(and_(db.Menus.role == 10, db.Menus.active)).all()
                return {"docs": menus, "count": len(menus)}
            if g.user.role < 10 and g.user.role != 1:
                # Menus granted through roles matching the user's role type.
                menus_id = []
                roles = g.tran.query(db.Roles).filter_by(_deleted='infinity') \
                    .filter(db.Roles.roletype == g.user.roleType['roleType']).all()
                if roles:
                    for role in roles:
                        menus_id.extend(role.menus_id)
                menus = g.tran.query(db.Menus).filter_by(_deleted='infinity') \
                    .filter(and_(db.Menus._id.in_(menus_id), db.Menus.active)).all()
                return {"docs": menus, "count": len(menus)}
            elif g.user.role == 1:
                # Menus granted through the user's explicitly assigned roles.
                menus_id = []
                roles = g.tran.query(db.Roles).filter_by(_deleted='infinity') \
                    .filter(db.Roles._id.in_(g.user.roles_id if g.user.roles_id is not None else [])).all()
                if roles:
                    for role in roles:
                        menus_id.extend(role.menus_id)
                menus = g.tran.query(db.Menus).filter_by(_deleted='infinity') \
                    .filter(and_(db.Menus._id.in_(menus_id), db.Menus.active)).all()
                return {"docs": menus, "count": len(menus)}
            # NOTE(review): a role > 10 reaches neither branch and falls
            # through to the generic listing logic -- confirm intended.
        else:
            # Anonymous visitors only get public (role == 0) active menus.
            menus = g.tran.query(db.Menus).filter_by(_deleted='infinity') \
                .filter(and_(db.Menus.role == 0, db.Menus.active)).all()
            return {"docs": menus, "count": len(menus)}
    if table == db.Companies:
        if hasattr(g, 'user') and g.user.role != 1 and g.user.role != 10:
            # Ordinary users only see companies they are employed by or own.
            comps_id = []
            user_empl = g.tran.query(db.Companyemployees).filter(
                db.Companyemployees.user_id == g.user.id).all()
            if user_empl:
                for uc in user_empl:
                    comps_id.append(uc.company_id)
            user_company = g.tran.query(db.Companies).filter_by(
                _deleted='infinity', user_id=g.user.id).all()
            if user_company:
                for co in user_company:
                    comps_id.append(co._id)
            query = query.filter(db.Companies._id.in_(comps_id))
        if 'current' in bag and bag['current'] is True and hasattr(
                g, 'company'):
            query = query.filter(db.Companies._id == g.company._id)
        if g.client != '1':
            # Non-primary clients only see supplier companies.
            query = query.filter(db.Companies.company_type == 'supplier')
    # Company-scoped tables: restrict to the caller's company when present.
    if table == db.Company_product:
        if hasattr(g, 'company'):
            query = query.filter(
                db.Company_product.company_id == g.company._id)
    if table == db.Companybank:
        if hasattr(g, 'company'):
            query = query.filter(db.Companybank.company_id == g.company._id)
    if table == db.Companyqualification:
        if hasattr(g, 'company'):
            query = query.filter(
                db.Companyqualification.company_id == g.company._id)
    if table == db.Companydocument:
        if hasattr(g, 'company'):
            query = query.filter(
                db.Companydocument.company_id == g.company._id)
    if table == db.DirSection:
        # Narrow sections to those whose categories intersect the
        # specifications of local products (local=True) or of active
        # company products (local=False).
        if "local" in bag and bag["local"] is True:
            products = g.tran.query(db.Product).filter_by(_deleted='infinity') \
                .filter(db.Product.local == bag["local"]).all()
            spec_ids = []
            spec_in_ids = []
            for product in products:
                specification_ids = []
                prodspecs = g.tran.query(db.ProductSpec).filter_by(
                    _deleted='infinity', product_id=product._id).all()
                for prodspec in prodspecs:
                    if prodspec.specification_id not in specification_ids:
                        specification_ids.append(prodspec.specification_id)
                # NOTE(review): spec_in_ids is never populated, so this
                # symmetric_difference is just specification_ids -- looks
                # like a leftover accumulator; confirm intent.
                sii = Set(spec_in_ids)
                si = Set(specification_ids)
                ds = sii.symmetric_difference(si)
                if len(ds) > 0:
                    spec_ids.extend(specification_ids)
            dirsections = g.tran.query(db.DirSection) \
                .filter_by(_deleted='infinity').all()
            dir_id = []
            for dirsection in dirsections:
                ds = Set(dirsection.dircategories_id)
                sids = Set(spec_ids)
                drs = ds.intersection(sids)
                if len(drs) > 0:
                    dir_id.append(dirsection._id)
            query = query.filter(db.DirSection._id.in_(dir_id))
        elif "local" in bag and bag["local"] is False:
            company_products = g.tran.query(db.Company_product).filter_by(
                _deleted='infinity', status='active').all()
            spec_ids = []
            spec_in_ids = []
            for product in company_products:
                specification_ids = []
                prodspecs = g.tran.query(db.ProductSpec) \
                    .filter_by(_deleted='infinity', product_id=product.product_id).all()
                for prodspec in prodspecs:
                    if prodspec.specification_id not in specification_ids:
                        specification_ids.append(prodspec.specification_id)
                # NOTE(review): same never-populated spec_in_ids pattern as
                # in the local=True branch above.
                sii = Set(spec_in_ids)
                si = Set(specification_ids)
                ds = sii.symmetric_difference(si)
                if len(ds) > 0:
                    spec_ids.extend(specification_ids)
            dirsections = g.tran.query(db.DirSection) \
                .filter_by(_deleted='infinity').all()
            dir_id = []
            for dirsection in dirsections:
                ds = Set(dirsection.dircategories_id)
                sids = Set(spec_ids)
                drs = ds.intersection(sids)
                if len(drs) > 0:
                    dir_id.append(dirsection._id)
            query = query.filter(db.DirSection._id.in_(dir_id))
    if table == db.Typeofownership:
        # type_owner is handled specially and removed so the generic
        # filter_by below does not see it.
        if 'filter' in bag and 'type_owner' in bag['filter']:
            query = query.filter(
                db.Typeofownership.type_owner == bag["filter"]["type_owner"])
            del bag["filter"]["type_owner"]
    if table == db.DirCountry:
        query = query.order_by(db.DirCountry.data['index'].asc())
    if "filter" in bag:
        if "data" in bag["filter"] and isinstance(bag["filter"]["data"], dict):
            # JSONB containment match on the free-form data column.
            query = query.filter(
                table.data.contains(type_coerce(bag["filter"]["data"], JSONB)))
            del bag["filter"]["data"]
        query = query.filter_by(**bag["filter"])
    if "order_by" in bag:
        query = query.order_by(*bag["order_by"])
    # Total count is taken before paging.
    count = query.count()
    if "limit" in bag:
        query = query.limit(bag["limit"])
    if "offset" in bag:
        query = query.offset(bag["offset"])
    if "with_roles" in bag and bag["with_roles"] is True:
        if table == db.Companies:
            # Role records referenced by the company's roles_id array.
            roles = g.tran.query(func.json_build_object(
                "id", db.Roles._id, "name", db.Roles.name,
                "data", db.Roles.data)).select_from(db.Roles) \
                .filter_by(_deleted='infinity') \
                .filter(type_coerce(db.Companies.roles_id, JSONB)
                        .contains(type_coerce(func.jsonb_build_array(db.Roles._id), JSONB))).as_scalar().label(
                'roles')
            query = query.add_columns(roles)
    if "with_related" in bag and bag["with_related"] is True:
        # Per-table related records attached as JSON scalar subqueries.
        if table == db.Companies:
            companybank = g.tran.query(func.jsonb_agg(func.row_to_json(text('companybank.*')))) \
                .select_from(db.Companybank) \
                .filter_by(_deleted='infinity').filter(db.Companybank.company_id == db.Companies._id).as_scalar() \
                .label('companybank')
            # (sic) label kept as 'companuqualifications' -- existing
            # consumers depend on the misspelled key.
            companuqualifications = g.tran.query(func.jsonb_agg(func.row_to_json(text('companyqualification.*')))) \
                .select_from(db.Companyqualification) \
                .filter_by(_deleted='infinity').filter(
                db.Companyqualification.company_id == db.Companies._id).as_scalar() \
                .label('companuqualifications')
            roles = g.tran.query(func.json_build_object(
                "id", db.Roles._id, "name", db.Roles.name)).select_from(db.Roles) \
                .filter_by(_deleted='infinity') \
                .filter(type_coerce(db.Companies.roles_id, JSONB)
                        .contains(type_coerce(func.jsonb_build_array(db.Roles._id), JSONB))).as_scalar().label(
                'roles')
            query = query.add_columns(companybank, companuqualifications, roles)
        elif table == db.Message:
            comment = g.tran.query(func.jsonb_agg(func.row_to_json(text('comments.*')))) \
                .select_from(db.Comments) \
                .filter_by(_deleted='infinity').filter(db.Comments.message_id == db.Message._id).as_scalar() \
                .label('comment')
            query = query.add_columns(comment)
        elif table == db.Companyemployees:
            user = g.tran.query(func.json_build_object(
                "id", db.User.id, "fullname", db.User.username,
                "email", db.User.email, "rec_date", db.User.rec_date,
                "data", db.User.data, "role", db.User.role)).select_from(db.User) \
                .filter_by(id=db.Companyemployees.user_id) \
                .as_scalar().label('user')
            company = g.tran.query(func.json_build_object(
                "id", db.Companies._id, "name", db.Companies.name,
                "inn", db.Companies.inn)).select_from(db.Companies) \
                .filter_by(_deleted='infinity', _id=db.Companyemployees.company_id).as_scalar().label('company')
            roles = g.tran.query(func.json_build_object(
                "id", db.DirPosition._id, "name", db.DirPosition.name)).select_from(db.DirPosition) \
                .filter_by(_deleted='infinity') \
                .filter(type_coerce(db.Companyemployees.roles_id, JSONB)
                        .contains(type_coerce(func.jsonb_build_array(db.DirPosition._id), JSONB))).as_scalar().label(
                'roles')
            query = query.add_columns(user, company, roles)
    result = orm_to_json(query.all())
    if "with_related" in bag and bag["with_related"] is True:
        # NOTE(review): this find_relations(result, table_name) takes the
        # serialized rows, unlike the single-argument query-level
        # find_relations elsewhere in the codebase -- presumably a
        # different helper; confirm.
        result = find_relations(result, table_name)
    if table == db.Companies:
        # Hard-coded registration/expiry overrides for specific INNs.
        for r in result:
            if 'inn' in r and r['inn'] in ['00609201310130', '01209201710029']:
                r['_created'] = '2020-07-13 00:00:00'
                r['end_date'] = '2021-01-13 00:00:00'
            elif 'inn' in r and r['inn'] in [
                '02301201710287', '02202201310102', '01207201610238'
            ]:
                r['_created'] = '2020-07-22 00:00:00'
                r['end_date'] = '2021-01-22 00:00:00'
    return {
        "docs": result,
        "count": count,
        "current_date": datetime.datetime.today()
    }
def _get_tests_to_replicate_query(replica, bulk_size=200):
    """Build the SELECT producing the next batch of tests to replicate.

    Each row carries Elasticsearch-style `_type`/`_index`/`_id` labels,
    all Test columns (except timespan/parameters, the latter re-cast to
    text), and JSON-aggregated related data (errors, metadata, entities,
    subjects).  Pagination is keyset-based: once the replica has finished
    its untimed backlog, rows are ordered by (updated_at, id) after the
    last replicated pair; before that, rows with NULL updated_at are
    walked by id.
    """
    # Distinct entity ids linked to the test's session, correlated to the
    # outer Test row.
    session_entities_query = select([
        models.session_entity.c.session_id, models.session_entity.c.entity_id
    ]).where(models.session_entity.c.session_id == models.Test.session_id).distinct().correlate(models.Test).alias()
    # Same, for entities linked to the test itself.
    test_entities_query = select([
        models.test_entity.c.test_id, models.test_entity.c.entity_id
    ]).where(models.test_entity.c.test_id == models.Test.id).distinct().correlate(models.Test).alias()
    query = select([
        label("_type", text("'test'")),
        label("_index", text("'backslash'")),
        label("_id", models.Test.id),
        # All Test columns except the two handled specially below.
        *[getattr(models.Test, column_name)
          for column_name in models.Test.__table__.columns.keys()
          if column_name not in {'timespan', 'parameters'}],
        models.User.email.label('user_email'),
        cast(models.Test.parameters, sqlalchemy.Text).label('parameters'),
        func.json_build_object(
            "file_name", models.TestInformation.file_name,
            "class_name", models.TestInformation.class_name,
            "name", models.TestInformation.name,
            "variation", cast(models.TestVariation.variation, sqlalchemy.Text),
        ).label('test'),
        select([func.array_agg(
            func.json_build_object(
                'timestamp', models.Error.timestamp,
                'message', models.Error.message)
        )]).where(models.Error.test_id == models.Test.id).label('errors'),
        select([
            func.json_object_agg(models.SessionMetadata.key,
                                 models.SessionMetadata.metadata_item).label('session_metadata')
        ]).where(models.SessionMetadata.session_id == models.Test.session_id).label('session_metadata'),
        select([
            func.json_object_agg(models.TestMetadata.key,
                                 models.TestMetadata.metadata_item)
        ]).where(models.TestMetadata.test_id == models.Test.id).label('test_metadata'),
        select([
            func.array_agg(
                func.json_build_object(
                    "name", models.Entity.name,
                    "type", models.Entity.type,
                )
            )
        ]).select_from(session_entities_query.join(
            models.Entity,
            models.Entity.id == session_entities_query.c.entity_id)).label('session_entities'),
        select([
            func.array_agg(
                func.json_build_object(
                    "name", models.Entity.name,
                    "type", models.Entity.type,
                )
            )
        ]).select_from(test_entities_query.join(
            models.Entity,
            models.Entity.id == test_entities_query.c.entity_id)).label('test_entities'),
        select([
            func.array_agg(
                func.json_build_object(
                    "name", models.Subject.name,
                    "product", models.Product.name,
                    "version", models.ProductVersion.version,
                    "revision", models.ProductRevision.revision,
                )
            )
        ]).select_from(
            models.session_subject
            .join(models.SubjectInstance)
            .join(models.Subject)
            .join(models.ProductRevision)
            .join(models.ProductVersion)
            .join(models.Product)
        ).where(models.session_subject.c.session_id == models.Test.session_id).label('subjects'),
    ]).select_from(
        models.Test.__table__.join(models.Session.__table__)
        .outerjoin(models.User.__table__, models.Session.user_id == models.User.id)
        .outerjoin(models.TestInformation)
        .outerjoin(models.TestVariation)
    ).where(_REPLICATION_TEST_FILTER)
    if replica.untimed_done:
        # Keyset pagination over (updated_at, id) past the last replicated
        # pair; strictly newer timestamps, or same timestamp with larger id.
        if replica.last_replicated_timestamp is not None:
            query = query.where(or_(
                models.Test.updated_at > replica.last_replicated_timestamp,
                and_(
                    models.Test.updated_at == replica.last_replicated_timestamp,
                    models.Test.id > replica.last_replicated_id,
                )))
        query = query.order_by(models.Test.updated_at.asc(),
                               models.Test.id.asc())
    else:
        # Backlog phase: never-updated rows (SQL "IS NULL" via ==),
        # walked in id order.
        query = query.where(models.Test.updated_at == None)  # noqa: E711
        if replica.last_replicated_id is not None:
            query = query.where(
                models.Test.id > replica.last_replicated_id)
        query = query.order_by(models.Test.id.asc())
    return query.limit(bulk_size)
def get_team(self, team_id):
    """Fetch one team as a dict with leads and members aggregated as JSON.

    The result maps team columns plus 'teamLeads' and 'teamMembers' --
    each a JSON object keyed by user id with emailAddress/name (members
    additionally carry a 'permissions' object).  Returns None when no
    team matches *team_id*.
    """
    # Leads: one row per (team, lead user), ordered by user name.
    leads = (db.session
             .query(TeamMember.team_id, User.id, User.name,
                    User.email_address)
             .join(User)
             .filter(TeamMember.is_team_lead.is_(True))
             .order_by(User.name)
             .subquery())
    # Collapse the leads of each team into one JSON object.
    leads_json = (db.session
                  .query(leads.columns.team_id,
                         func.json_object_agg(
                             leads.columns.id,
                             func.json_build_object(
                                 'emailAddress', leads.columns.email_address,
                                 'name', leads.columns.name
                             )
                         ).label('teamLeads'))
                  .group_by(leads.columns.team_id)
                  .subquery())
    # Non-lead members, same shape plus the membership row id.
    members = (db.session
               .query(TeamMember.id.label('team_member_id'),
                      TeamMember.team_id, User.id, User.name,
                      User.email_address)
               .join(User)
               .filter(TeamMember.is_team_lead.is_(False))
               .order_by(User.name)
               .subquery())
    # Attach each member's permissions as a JSON object ('{}' when none).
    members_with_perms = (db.session
                          .query(
                              members,
                              func.coalesce(
                                  func.json_object_agg(
                                      TeamMemberPermission.permission,
                                      True
                                  ).filter(TeamMemberPermission.permission.isnot(None)),
                                  '{}'
                              ).label('permissions'))
                          .join(
                              TeamMemberPermission,
                              TeamMemberPermission.team_member_id ==
                              members.columns.team_member_id,
                              isouter=True)
                          .group_by(
                              members.columns.team_member_id,
                              members.columns.team_id,
                              members.columns.id,
                              members.columns.name,
                              members.columns.email_address)
                          .order_by(members.columns.name)
                          .subquery())
    # Collapse the members of each team into one JSON object.
    members_json = (db.session
                    .query(members_with_perms.columns.team_id,
                           func.json_object_agg(
                               members_with_perms.columns.id,
                               func.json_build_object(
                                   'emailAddress',
                                   members_with_perms.columns.email_address,
                                   'name', members_with_perms.columns.name,
                                   'permissions',
                                   members_with_perms.columns.permissions
                               )).label('teamMembers'))
                    .group_by(members_with_perms.columns.team_id)
                    .subquery())
    # Outer-join both aggregates so a team with no leads/members still
    # comes back (with NULL aggregates).
    row = (db.session
           .query(Team.id,
                  Team.name,
                  func.coalesce(Team.email_address, '').label('emailAddress'),
                  Team.status,
                  leads_json.columns.teamLeads,
                  members_json.columns.teamMembers)
           .join(leads_json,
                 leads_json.columns.team_id == Team.id,
                 isouter=True)
           .join(members_json,
                 members_json.columns.team_id == Team.id,
                 isouter=True)
           .filter(Team.id == team_id)
           .one_or_none())
    return row._asdict() if row else None
def get_fields_query(self):
    """Build the SELECT projecting every test into the flat indexing
    document shape: `_type`/`_index`/`_id` labels, the Test columns,
    session/user context, and JSON-aggregated related data (errors,
    warnings, metadata, entities, subjects)."""
    # Distinct entity ids linked to the test's session, correlated to the
    # outer Test row.
    session_entities_query = select([
        models.session_entity.c.session_id, models.session_entity.c.entity_id
    ]).where(models.session_entity.c.session_id == models.Test.session_id).distinct().correlate(
        models.Test).alias()
    # Same, for entities linked to the test itself.
    test_entities_query = select([
        models.test_entity.c.test_id, models.test_entity.c.entity_id
    ]).where(models.test_entity.c.test_id == models.Test.id).distinct().correlate(models.Test).alias()
    return select([
        label("_type", text("'test'")),
        label("_index", text("'test'")),
        label("_id", models.Test.id),
        # All Test columns except the two handled specially below.
        *[
            getattr(models.Test, column_name)
            for column_name in models.Test.__table__.columns.keys()
            if column_name not in {'timespan', 'parameters'}
        ],
        models.Session.logical_id.label('session_logical_id'),
        models.User.email.label('user_email'),
        cast(models.Test.parameters, sqlalchemy.Text).label('parameters'),
        func.json_build_object(
            "file_name", models.TestInformation.file_name,
            "class_name", models.TestInformation.class_name,
            "name", models.TestInformation.name,
            "variation", cast(models.TestVariation.variation, sqlalchemy.Text),
        ).label('test'),
        select([
            func.array_agg(
                func.json_build_object('timestamp', models.Error.timestamp,
                                       'message', models.Error.message))
        ]).where(models.Error.test_id == models.Test.id).label('errors'),
        select([
            func.array_agg(
                func.json_build_object('timestamp', models.Warning.timestamp,
                                       'message', models.Warning.message))
        ]).where(
            models.Warning.test_id == models.Test.id).label('warnings'),
        select([
            func.json_object_agg(
                models.SessionMetadata.key,
                models.SessionMetadata.metadata_item).label(
                'session_metadata')
        ]).where(models.SessionMetadata.session_id ==
                 models.Test.session_id).label('session_metadata'),
        select([
            func.json_object_agg(models.TestMetadata.key,
                                 models.TestMetadata.metadata_item)
        ]).where(models.TestMetadata.test_id == models.Test.id).label(
            'test_metadata'),
        select([
            func.array_agg(
                func.json_build_object(
                    "name", models.Entity.name,
                    "type", models.Entity.type,
                ))
        ]).select_from(
            session_entities_query.join(
                models.Entity, models.Entity.id == session_entities_query.
                c.entity_id)).label('session_entities'),
        select([
            func.array_agg(
                func.json_build_object(
                    "name", models.Entity.name,
                    "type", models.Entity.type,
                ))
        ]).select_from(
            test_entities_query.join(
                models.Entity,
                models.Entity.id == test_entities_query.c.entity_id)).label('test_entities'),
        select([
            func.array_agg(
                func.json_build_object(
                    "name", models.Subject.name,
                    "product", models.Product.name,
                    "version", models.ProductVersion.version,
                    "revision", models.ProductRevision.revision,
                ))
        ]).select_from(
            models.session_subject.join(models.SubjectInstance).join(
                models.Subject).join(models.ProductRevision).join(
                models.ProductVersion).join(models.Product)).where(
            models.session_subject.c.session_id ==
            models.Test.session_id).label('subjects'),
    ]).select_from(
        models.Test.__table__.join(models.Session.__table__).outerjoin(
            models.User.__table__,
            models.Session.user_id == models.User.id).outerjoin(
            models.TestInformation).outerjoin(models.TestVariation))