def base_query(labeled=False):
    """Build a query over post ids joined to their content rows.

    :param labeled: when True, label the id column ``post_id``.
    :return: (post polymorphic alias, query) tuple.

    NOTE(review): ``db``, ``include_deleted``, ``include_moderating`` and
    ``user_id`` are not parameters here — presumably closure/outer-scope
    variables of an enclosing method. TODO confirm against the caller.
    """
    # Flat, unaliased polymorphic selectables over the base tables only.
    post = with_polymorphic(
        Post, [], Post.__table__, aliased=False, flat=True)
    content = with_polymorphic(
        Content, [], Content.__table__, aliased=False, flat=True)
    if labeled:
        query = db.query(post.id.label("post_id"))
    else:
        query = db.query(post.id)
    query = query.join(content, content.id == post.id)
    states = set(countable_publication_states)  # Or just published?
    states.update(deleted_publication_states)
    if include_deleted is not None:
        if include_deleted is True:
            # Only deleted posts are wanted.
            states = set(deleted_publication_states)
        else:
            # Exclude tombstoned content entirely.
            query = query.filter(content.tombstone_date == None)  # noqa: E711
    if include_moderating is True:
        states.add(PublicationStates.SUBMITTED_AWAITING_MODERATION)
    state_condition = post.publication_state.in_(states)
    if user_id:
        # A user always sees their own drafts (and, with "mine",
        # their own posts awaiting moderation).
        if include_moderating == "mine":
            state_condition = state_condition | (
                post.publication_state.in_([
                    PublicationStates.SUBMITTED_AWAITING_MODERATION,
                    PublicationStates.DRAFT]) & (post.creator_id == user_id))
        else:
            state_condition = state_condition | (
                (post.publication_state == PublicationStates.DRAFT) &
                (post.creator_id == user_id))
    query = query.filter(state_condition)
    return post, query
def test_join_to_join_entities(self):
    """Self-join a polymorphic selectable to an aliased copy of itself
    and verify the returned entity pairs."""
    sess = create_session()
    poly = with_polymorphic(Person, [Engineer])
    poly_alias = with_polymorphic(Person, [Engineer], aliased=True)
    join_condition = or_(
        poly.Engineer.primary_language == poly_alias.Engineer.primary_language,
        and_(
            poly.Engineer.primary_language == None,  # noqa
            poly_alias.Engineer.primary_language == None,
            poly.person_id > poly_alias.person_id,
        ),
    )
    query = (
        sess.query(poly, poly_alias)
        .join(poly_alias, join_condition)
        .order_by(poly.name, poly_alias.name)
    )
    rows = [(left.name, type(left), right.name, type(right))
            for left, right in query]
    eq_(
        rows,
        [
            ('dilbert', Engineer, 'dilbert', Engineer),
            ('dogbert', Manager, 'pointy haired boss', Boss),
            ('vlad', Engineer, 'vlad', Engineer),
            ('wally', Engineer, 'wally', Engineer),
        ],
    )
def test_join_to_join_columns(self):
    """Same self-join as the entity test, but selecting columns."""
    sess = create_session()
    poly = with_polymorphic(Person, [Engineer])
    poly_alias = with_polymorphic(Person, [Engineer], aliased=True)
    join_condition = or_(
        poly.Engineer.primary_language == poly_alias.Engineer.primary_language,
        and_(
            poly.Engineer.primary_language == None,  # noqa
            poly_alias.Engineer.primary_language == None,
            poly.person_id > poly_alias.person_id,
        ),
    )
    query = (
        sess.query(
            poly.name,
            poly.Engineer.primary_language,
            poly_alias.name,
            poly_alias.Engineer.primary_language,
        )
        .join(poly_alias, join_condition)
        .order_by(poly.name, poly_alias.name)
    )
    eq_(
        list(query),
        [
            ("dilbert", "java", "dilbert", "java"),
            ("dogbert", None, "pointy haired boss", None),
            ("vlad", "cobol", "vlad", "cobol"),
            ("wally", "c++", "wally", "c++"),
        ],
    )
def test_join_to_join_columns(self):
    """Column-level self-join of a polymorphic selectable against an
    aliased copy; verifies the projected tuples."""
    sess = create_session()
    base_poly = with_polymorphic(Person, [Engineer])
    alias_poly = with_polymorphic(Person, [Engineer], aliased=True)
    on_clause = or_(
        base_poly.Engineer.primary_language ==
        alias_poly.Engineer.primary_language,
        and_(
            base_poly.Engineer.primary_language == None,  # noqa
            alias_poly.Engineer.primary_language == None,
            base_poly.person_id > alias_poly.person_id,
        ),
    )
    results = (
        sess.query(
            base_poly.name,
            base_poly.Engineer.primary_language,
            alias_poly.name,
            alias_poly.Engineer.primary_language,
        )
        .join(alias_poly, on_clause)
        .order_by(base_poly.name, alias_poly.name)
        .all()
    )
    eq_(
        results,
        [
            ('dilbert', 'java', 'dilbert', 'java'),
            ('dogbert', None, 'pointy haired boss', None),
            ('vlad', 'cobol', 'vlad', 'cobol'),
            ('wally', 'c++', 'wally', 'c++'),
        ],
    )
def load_discussion(self, discussion):
    """Populate ``self.paths`` with post ancestry paths per idea for the
    given discussion.

    :param discussion: the discussion whose idea/content links are scanned.
    """
    self.discussion = discussion
    # Fix: a previous assignment ``post = with_polymorphic(Content, [Post])``
    # was dead code (immediately overwritten below) and has been removed.
    ICL = with_polymorphic(
        IdeaContentLink, [], IdeaContentLink.__table__,
        aliased=False, flat=True)
    post = with_polymorphic(
        Post, [], Post.__table__, aliased=False, flat=True)
    # This should be a join but creates a subquery
    content = with_polymorphic(
        Content, [], Content.__table__, aliased=False, flat=True)
    # One row per idea-content link: (idea, link type, post ancestry path).
    q = discussion.db.query(
        ICL.idea_id,
        ICL.type,
        post.ancestry.op('||')(post.id.cast(String))
    ).join(post, post.id == ICL.content_id
    ).join(content, content.id == post.id
    ).filter(
        ICL.idea_id != None,  # noqa: E711
        content.discussion_id == discussion.id,
        content.hidden == False)  # noqa: E712
    for (idea_id, typename, path) in q:
        path += ","
        if typename in self.positives:
            self.paths[idea_id].add_path(PostPathData(path, True))
        elif typename in self.negatives:
            self.paths[idea_id].add_path(PostPathData(path, False))
    # Python 2 iterator method kept deliberately (file uses py2 idioms).
    for ppc in self.paths.itervalues():
        ppc.reduce()
def _probe_query_hosts(probe_type, ids, cfg, hm, queries):
    """Append to *queries* a Host query scoped by *probe_type*.

    :param probe_type: one of 'hosts', 'entities', 'domains', 'houses',
        'streets', 'districts', 'cities', 'housegroups', 'places'; any
        other value appends nothing (as before).
    :param ids: iterable of ids the scope column must match.
    :param cfg, hm: unused here, kept for interface compatibility.
    :param queries: output list mutated in place.
    """
    session = DBSession()
    entity_poly = with_polymorphic(Entity, Entity)
    # Refactor of nine near-identical if/elif branches:
    # probe type -> (join chain to apply, column filtered by ``ids``).
    plans = {
        'hosts': ((), Host.id),
        'entities': ((), Host.entity_id),
        'domains': ((), Host.domain_id),
        'houses': ((entity_poly, Address), Address.house_id),
        'streets': ((entity_poly, Address, House), House.street_id),
        'districts': ((entity_poly, Address, House, Street),
                      Street.district_id),
        'cities': ((entity_poly, Address, House, Street), Street.city_id),
        'housegroups': ((entity_poly, Address, House, HouseGroupMapping),
                        HouseGroupMapping.group_id),
        'places': ((NetworkDevice,), NetworkDevice.place_id),
    }
    plan = plans.get(probe_type)
    if plan is None:
        return  # unknown probe types are silently ignored, as before
    joins, column = plan
    query = session.query(Host)
    for target in joins:
        query = query.join(target)
    queries.append(query.filter(column.in_(ids)))
def orphan_clause(self, user_id=None, content=None, include_deleted=False,
                  include_moderating=None):
    """Query for "orphan" posts: content in this discussion that is not
    under the root idea path, excluding syntheses and webpages, filtered
    by the usual deletion/moderation visibility rules.

    :param user_id: viewing user; enables draft/moderation visibility and
        appends a ViewPost column when set.
    :param content: optional pre-built Content polymorphic alias to reuse.
    :param include_deleted: True → only deleted states; None → all states;
        False → exclude tombstoned content.
    :param include_moderating: True, or "mine" to include the viewer's own
        posts awaiting moderation.
    """
    root_path = self.paths[self.root_idea_id]
    db = self.discussion.default_db
    # Posts that ARE under the root idea; orphans are the complement.
    subq = root_path.as_clause_base(
        db, include_deleted=include_deleted,
        include_moderating=include_moderating,
        user_id=user_id if include_moderating else None)
    content = content or with_polymorphic(
        Content, [], Content.__table__, aliased=False, flat=True)
    synth_post_type = SynthesisPost.__mapper_args__['polymorphic_identity']
    webpage_post_type = Webpage.__mapper_args__['polymorphic_identity']
    q = db.query(content.id.label("post_id")).filter(
        (content.discussion_id == self.discussion.id) &
        (content.hidden == False) &  # noqa: E712
        (content.type.notin_((synth_post_type, webpage_post_type))) &
        content.id.notin_(subq))
    post = with_polymorphic(
        Post, [], Post.__table__, aliased=False, flat=True)
    q = q.join(post, post.id == content.id)
    states = set(countable_publication_states)  # Or just published?
    states.update(deleted_publication_states)
    if include_deleted is not None:
        if include_deleted is True:
            # Only deleted posts.
            states = set(deleted_publication_states)
        else:
            # Exclude tombstoned content entirely.
            q = q.filter(content.tombstone_date == None)  # noqa: E711
    if include_moderating is True:
        states.add(PublicationStates.SUBMITTED_AWAITING_MODERATION)
    state_condition = post.publication_state.in_(states)
    if user_id:
        # A user always sees their own drafts (and with "mine", their own
        # posts awaiting moderation).
        if include_moderating == "mine":
            state_condition = state_condition | (
                post.publication_state.in_([
                    PublicationStates.SUBMITTED_AWAITING_MODERATION,
                    PublicationStates.DRAFT]) & (post.creator_id == user_id))
        else:
            state_condition = state_condition | (
                (post.publication_state == PublicationStates.DRAFT) &
                (post.creator_id == user_id))
    q = q.filter(state_condition)
    if user_id:
        # subquery?
        q = q.outerjoin(
            ViewPost,
            (ViewPost.post_id == content.id) &
            (ViewPost.tombstone_date == None) &
            (ViewPost.actor_id == user_id)  # noqa: E711
        ).add_columns(ViewPost.id)
    return q
def test_all_subq_query(self):
    """Chained subqueryloads across two flat with_polymorphic targets
    should emit exactly the four expected statements."""
    A, B, B2, C, C2, D = self.classes("A", "B", "B2", "C", "C2", "D")

    session = Session(testing.db)

    b_b2 = with_polymorphic(B, [B2], flat=True)
    c_c2 = with_polymorphic(C, [C2], flat=True)

    # Eagerly load A.bs (as B/B2), then .cs (as C/C2), then .ds, each via
    # its own subquery.
    q = session.query(A).options(
        subqueryload(A.bs.of_type(b_b2))
        .subqueryload(b_b2.cs.of_type(c_c2))
        .subqueryload(c_c2.ds)
    )

    self.assert_sql_execution(
        testing.db,
        q.all,
        CompiledSQL("SELECT t_a.id AS t_a_id FROM t_a", {}),
        CompiledSQL(
            "SELECT t_b_1.type AS t_b_1_type, t_b_1.id AS t_b_1_id, "
            "t_b_1.a_id AS t_b_1_a_id, t_b2_1.id AS t_b2_1_id, "
            "anon_1.t_a_id AS anon_1_t_a_id FROM "
            "(SELECT t_a.id AS t_a_id FROM t_a) AS anon_1 "
            "JOIN (t_b AS t_b_1 LEFT OUTER JOIN t_b2 AS t_b2_1 "
            "ON t_b_1.id = t_b2_1.id) ON anon_1.t_a_id = t_b_1.a_id "
            "ORDER BY anon_1.t_a_id",
            {},
        ),
        CompiledSQL(
            "SELECT t_c_1.type AS t_c_1_type, t_c_1.id AS t_c_1_id, "
            "t_c_1.b_id AS t_c_1_b_id, t_c2_1.id AS t_c2_1_id, "
            "t_b_1.id AS t_b_1_id FROM (SELECT t_a.id AS t_a_id FROM t_a) "
            "AS anon_1 JOIN (t_b AS t_b_1 LEFT OUTER JOIN t_b2 AS t_b2_1 "
            "ON t_b_1.id = t_b2_1.id) ON anon_1.t_a_id = t_b_1.a_id "
            "JOIN (t_c AS t_c_1 LEFT OUTER JOIN t_c2 AS t_c2_1 ON "
            "t_c_1.id = t_c2_1.id) ON t_b_1.id = t_c_1.b_id "
            "ORDER BY t_b_1.id",
            {},
        ),
        CompiledSQL(
            "SELECT t_d.id AS t_d_id, t_d.c_id AS t_d_c_id, "
            "t_c_1.id AS t_c_1_id "
            "FROM (SELECT t_a.id AS t_a_id FROM t_a) AS anon_1 "
            "JOIN (t_b AS t_b_1 LEFT OUTER JOIN t_b2 AS t_b2_1 "
            "ON t_b_1.id = t_b2_1.id) "
            "ON anon_1.t_a_id = t_b_1.a_id "
            "JOIN (t_c AS t_c_1 LEFT OUTER JOIN t_c2 AS t_c2_1 "
            "ON t_c_1.id = t_c2_1.id) "
            "ON t_b_1.id = t_c_1.b_id "
            "JOIN t_d ON t_c_1.id = t_d.c_id ORDER BY t_c_1.id",
            {},
        ),
    )
def get_counts_for_query(self, q):
    """Return (post_count, contributor_count, viewed_count) for query *q*."""
    # HACK: peek at the query's private entity list to locate the mapped
    # entities by their primary table name.
    entity_by_table = {
        ent.entity_zero.entity.__mapper__.tables[0].name:
            ent.entity_zero.entity
        for ent in q._entities}
    content_entity = entity_by_table['content']

    post = with_polymorphic(
        Post, [], Post.__table__, aliased=False, flat=True)
    q = q.join(
        post,
        (content_entity.id == post.id) &
        (post.publication_state.in_(countable_publication_states)))

    if self.user_id:
        action_entity = entity_by_table['action']
        return q.with_entities(
            count(content_entity.id),
            count(post.creator_id.distinct()),
            count(action_entity.id)).first()

    post_count, contributor_count = q.with_entities(
        count(content_entity.id),
        count(post.creator_id.distinct())).first()
    return (post_count, contributor_count, 0)
def cash_instances_to_dict(self, list_of_classes, do_make_transient=False):
    """Cache all instances of the given classes into a plain dict.

    Closes the current session, opens a fresh one, reads every instance of
    each class polymorphically (with relationships immediate-loaded),
    detaches them, and returns ``{class name: [instances]}`` so they can be
    used freely outside any session.

    (Original note: keeping the session open was considered and rejected —
    a separate session can be created in the worst case, but two
    simultaneous sessions are also dangerous.)

    :param list_of_classes: mapped classes whose instances to cache.
    :param do_make_transient: also make each instance transient so it
        never tries to touch the database on attribute access.
    """
    self.close_session()
    self.private_activate_session()
    dict_with_instances = dict()
    for cls_i in list_of_classes:
        # TODO: check whether relationships need any special loading here.
        repr_cls_i = with_polymorphic(cls_i, '*')
        inst_list = []
        for inst_i in self.active_session.query(repr_cls_i).options(
                immediateload('*')).all():
            inst_list.append(inst_i)
        dict_with_instances[cls_i.__name__] = inst_list
    # Detach everything — this is why the session was closed beforehand.
    self.active_session.expunge_all()
    for inst_list in dict_with_instances.itervalues():  # py2 idiom kept
        for inst_i in inst_list:
            if hasattr(inst_i, "disconnected_from_session"):
                # NOTE: BaseException kept for backward compatibility;
                # a specific Exception subclass would normally be better.
                raise BaseException("[c_session_handler][cash_instances_to_dict] you cannot use 'disconnected_from_session' attribute in a class here")
            inst_i.disconnected_from_session = True
            if do_make_transient:
                # Without this it may still try to query the database.
                make_transient(inst_i)
    self.close_session()
    return dict_with_instances
def test_any_wpoly(self):
    """.any() against an aliased (non-flat) with_polymorphic target
    renders the polymorphic union as a derived EXISTS subquery."""
    ParentThing, DataContainer, Job, SubJob = \
        self.classes.ParentThing,\
        self.classes.DataContainer,\
        self.classes.Job,\
        self.classes.SubJob

    Job_P = with_polymorphic(Job, SubJob, aliased=True)

    s = Session()
    q = s.query(Job).join(DataContainer.jobs).\
        filter(
            DataContainer.jobs.of_type(Job_P).\
            any(Job_P.id < Job.id)
        )
    self.assert_compile(
        q,
        "SELECT job.id AS job_id, job.type AS job_type, "
        "job.container_id "
        "AS job_container_id "
        "FROM data_container "
        "JOIN job ON data_container.id = job.container_id "
        "WHERE EXISTS (SELECT 1 "
        "FROM (SELECT job.id AS job_id, job.type AS job_type, "
        "job.container_id AS job_container_id, "
        "subjob.id AS subjob_id, subjob.attr AS subjob_attr "
        "FROM job LEFT OUTER JOIN subjob ON job.id = subjob.id) AS anon_1 "
        "WHERE data_container.id = anon_1.job_container_id AND job.id > anon_1.job_id)"
    )
def test_any_wpoly(self):
    """With flat=True, .any() against the aliased polymorphic target
    compiles to plain aliased joins instead of a derived subquery."""
    ParentThing, DataContainer, Job, SubJob = (
        self.classes.ParentThing,
        self.classes.DataContainer,
        self.classes.Job,
        self.classes.SubJob,
    )

    Job_P = with_polymorphic(Job, SubJob, aliased=True, flat=True)

    s = Session()
    q = (
        s.query(Job)
        .join(DataContainer.jobs)
        .filter(DataContainer.jobs.of_type(Job_P).any(Job_P.id < Job.id))
    )
    self.assert_compile(
        q,
        "SELECT job.id AS job_id, job.type AS job_type, "
        "job.widget_id AS job_widget_id, "
        "job.container_id "
        "AS job_container_id "
        "FROM data_container "
        "JOIN job ON data_container.id = job.container_id "
        "WHERE EXISTS (SELECT 1 "
        "FROM job AS job_1 LEFT OUTER JOIN subjob AS subjob_1 "
        "ON job_1.id = subjob_1.id "
        "WHERE data_container.id = job_1.container_id "
        "AND job_1.id < job.id)",
    )
def test_joinedload_explicit_with_unaliased_poly_compile(self):
    """An unaliased with_polymorphic target under joinedload must be
    rejected when the query is compiled."""
    sess = Session()
    target = with_polymorphic(Person, Engineer)
    q = (
        sess.query(Company)
        .filter_by(company_id=1)
        .options(joinedload(Company.employees.of_type(target)))
    )
    assert_raises_message(
        sa_exc.InvalidRequestError,
        "Detected unaliased columns when generating joined load.",
        q._compile_context,
    )
def test_joinedload_explicit_with_flataliased_poly_compile(self):
    """joinedload over a flat with_polymorphic target compiles to a
    nested LEFT OUTER JOIN with deterministic table aliases."""
    sess = Session()
    target = with_polymorphic(Person, Engineer, flat=True)
    q = sess.query(Company).\
        filter_by(company_id=1).\
        options(joinedload(Company.employees.of_type(target)))
    self.assert_compile(
        q,
        "SELECT companies.company_id AS companies_company_id, "
        "companies.name AS companies_name, "
        "people_1.person_id AS people_1_person_id, "
        "people_1.company_id AS people_1_company_id, "
        "people_1.name AS people_1_name, people_1.type AS people_1_type, "
        "engineers_1.person_id AS engineers_1_person_id, "
        "engineers_1.status AS engineers_1_status, "
        "engineers_1.engineer_name AS engineers_1_engineer_name, "
        "engineers_1.primary_language AS engineers_1_primary_language "
        "FROM companies LEFT OUTER JOIN (people AS people_1 "
        "LEFT OUTER JOIN engineers AS engineers_1 "
        "ON people_1.person_id = engineers_1.person_id "
        "LEFT OUTER JOIN managers AS managers_1 "
        "ON people_1.person_id = managers_1.person_id) "
        "ON companies.company_id = people_1.company_id "
        "WHERE companies.company_id = :company_id_1 "
        "ORDER BY people_1.person_id"
    )
def search_added(self, year, month=None, day=None, types='*', limit=None):
    """Search published content by its "added" date, truncated to the
    finest date unit supplied (year, month or day)."""
    precision = 'day' if day else 'month' if month else 'year'
    search_date = date(year, month or 1, day or 1)

    search_for = orm.with_polymorphic(Content, types)
    search_query = self.dbsession.query(search_for)

    truncated_added = sql.func.date_trunc(
        precision, sql.cast(search_for.added, Date))
    filters = sql.and_(
        search_for.filter_published(),
        truncated_added == sql.func.date_trunc(precision, search_date),
    )
    if types != '*':
        # Restrict to the requested polymorphic subtypes.
        ids = polymorphic_ids(search_for, types)
        filters.append(search_for.content_type_id.in_(ids))

    search_query = search_query.filter(filters)
    count = search_query.count()
    search_query = search_query.order_by(search_for.added.desc())
    if limit:
        search_query = search_query.limit(limit)
    return search_result(search_query, count)
def get_indexable_contents(session):
    """Yield every entity worth indexing: visible ideas, all agent
    profiles, and published posts along with their extracts."""
    from assembl.models import AgentProfile, Idea, Post
    from assembl.models.post import PublicationStates

    idea_query = session.query(Idea
        ).filter(Idea.tombstone_condition()
        ).filter(Idea.hidden == False  # noqa: E712
        ).options(
            joinedload(Idea.title).joinedload("entries"),
            joinedload(Idea.synthesis_title).joinedload("entries"),
            joinedload(Idea.description).joinedload("entries"))
    for idea in idea_query:
        yield idea

    for user in session.query(AgentProfile):
        yield user

    AllPost = with_polymorphic(Post, '*')
    post_query = session.query(AllPost
        ).filter(AllPost.tombstone_condition()
        ).filter(AllPost.hidden == False  # noqa: E712
        ).filter(AllPost.publication_state == PublicationStates.PUBLISHED
        ).options(
            joinedload(AllPost.subject).joinedload("entries"),
            joinedload(AllPost.body).joinedload("entries"))
    for post in post_query:
        # Extracts are indexed before their parent post.
        for extract in post.extracts:
            yield extract
        yield post
def __init__(self, request, starts=None, count=-6, types='*'):
    """Build month-bucketed archive counts of published content.

    ``count`` months (negative = going backwards) starting at ``starts``
    (default: today) define the date window."""
    super().__init__(request)
    if starts is None:
        starts = date.today()

    months = list(itermonths(starts, count))
    if count < 0:
        months.reverse()
    last_day = monthrange(*months[-1])[1]
    range_start = date(*months[0], 1)
    range_end = date(*months[-1], last_day)

    entity = orm.with_polymorphic(Content, types)
    filters = sql.and_(
        entity.filter_published(),
        entity.added.between(range_start, range_end),
    )
    self.types_ids = polymorphic_ids(entity, types) if types != '*' else []
    if self.types_ids:
        filters.append(entity.content_type_id.in_(self.types_ids))

    month_bucket = sql.func.date_trunc('month', entity.added)
    archives = (
        self.dbsession.query(
            sql.func.count().label('cpt'),
            month_bucket.label('ts'))
        .join(entity.type)
        .filter(filters)
        .group_by(month_bucket)
        .order_by(month_bucket.desc()))
    self.archives = archives.all()
def hosts_get_by_region(context, region_id, filters):
    """Get all hosts for this region.

    :param region_id: ID for the region
    :param filters: dict of optional filter criteria; supported keys are
        "name", "ip_address", "id", "cell" (matched against ``cell_id``)
        and "device_type".
    :raises exceptions.NotFound: propagated if the query reports no result
    :raises exceptions.UnknownException: on any other query failure
    """
    host_devices = with_polymorphic(models.Device, [models.Host])
    query = model_query(context, host_devices, project_only=True)
    query = query.filter_by(region_id=region_id)
    query = query.filter_by(type='hosts')

    # Map each supported filter key to the column it constrains,
    # replacing the previous run of copy-pasted if-blocks.
    filter_columns = (
        ("name", "name"),
        ("ip_address", "ip_address"),
        ("id", "id"),
        ("cell", "cell_id"),
        ("device_type", "device_type"),
    )
    for key, column in filter_columns:
        if key in filters:
            query = query.filter_by(**{column: filters[key]})

    try:
        result = query.all()
    except sa_exc.NoResultFound:
        # NOTE(review): Query.all() does not raise NoResultFound;
        # this handler looks unreachable — confirm and consider removing.
        raise exceptions.NotFound()
    except Exception as err:
        raise exceptions.UnknownException(message=err)
    return result
def base_query(labeled=False):
    """Return (post polymorphic alias, query over post ids); the id
    column is labeled ``post_id`` when requested."""
    post = with_polymorphic(
        Post, [], Post.__table__, aliased=False, flat=True)
    id_column = post.id.label("post_id") if labeled else post.id
    return post, db.query(id_column)
def test_load(self):
    """A Company loaded with a subqueryload over a flat with_polymorphic
    target must survive a pickle round-trip."""
    s = Session()
    poly = with_polymorphic(Person, [Engineer, Manager], flat=True)
    load_option = subqueryload(Company.employees.of_type(poly))
    emp = s.query(Company).options(load_option).first()
    e2 = pickle.loads(pickle.dumps(emp))
def netdevices_get_by_region(context, region_id, filters):
    """Get all network devices for the given region."""
    devices = with_polymorphic(models.Device, [models.NetDevice])
    query = (
        model_query(context, devices, project_only=True)
        .filter_by(region_id=region_id)
        .filter_by(type='net_devices'))
    return query.all()
def indirect_idea_content_links_with_cache( self, links_above_post=None, filter=True): "Return all ideaContentLinks related to this post or its ancestors" # WIP: idea_content_links_above_post is still loaded separately # despite not being deferred. Deferring it hits a sqlalchemy bug. # Still appreciable performance gain using it instead of the orm, # and the ICL cache below. # TODO: move in path_utils? links_above_post = (self.idea_content_links_above_post if links_above_post is None else links_above_post) if not links_above_post: return [] from pyramid.threadlocal import get_current_request from .idea_content_link import IdeaContentLink from .idea import Idea icl_polymap = IdeaContentLink.__mapper__.polymorphic_map request = get_current_request() if getattr(request, "_idea_content_link_cache2", None) is None: if getattr(request, "_idea_content_link_cache1", None) is None: icl = with_polymorphic(IdeaContentLink, IdeaContentLink) co = with_polymorphic(Content, Content) request._idea_content_link_cache1 = {x[0]: x for x in self.db.query( icl.id, icl.idea_id, icl.content_id, icl.creator_id, icl.type, icl.creation_date).join(co).filter( co.discussion_id == self.discussion_id)} request._idea_content_link_cache2 = {} def icl_representation(id): if id not in request._idea_content_link_cache2: data = request._idea_content_link_cache1.get(id, None) if data is None: return None request._idea_content_link_cache2[id] = { "@id": IdeaContentLink.uri_generic(data[0]), "idIdea": Idea.uri_generic(data[1]), "idPost": Content.uri_generic(data[2]), "idCreator": AgentProfile.uri_generic(data[3]), "@type": icl_polymap[data[4]].class_.external_typename(), "created": data[5].isoformat() + "Z" } return request._idea_content_link_cache2[id] icls = [icl_representation(int(id)) for id in links_above_post.strip(',').split(',')] if filter: icls = self.filter_idea_content_links_r(icls) return icls
def test_with_polymorphic_any(self):
    """.any() over a subclass attribute of an aliased polymorphic target
    should find exactly the expected company."""
    sess = Session()
    wp = with_polymorphic(Person, [Engineer], aliased=True)
    criterion = Company.employees.of_type(wp).any(
        wp.Engineer.primary_language == "java")
    result = sess.query(Company.company_id).filter(criterion).all()
    eq_(result, [(1,)])
def go():
    """Joined-load employees via an aliased polymorphic target and
    compare against the fixture company."""
    target = with_polymorphic(Person, Engineer, aliased=True)
    query = (
        sess.query(Company)
        .filter_by(company_id=1)
        .options(joinedload(Company.employees.of_type(target))))
    eq_(query.all(), [self._company_with_emps_fixture()[0]])
def go():
    """Join to an aliased polymorphic target with contains_eager and
    verify both fixture companies come back ordered."""
    wp = with_polymorphic(Person, [Engineer, Manager], aliased=True)
    query = (
        sess.query(Company)
        .join(Company.employees.of_type(wp))
        .order_by(Company.company_id, wp.person_id)
        .options(contains_eager(Company.employees, alias=wp)))
    eq_(query.all(), [self.c1, self.c2])
def go():
    """Subquery-load employees via an unaliased polymorphic target and
    compare against the fixture company."""
    target = with_polymorphic(Person, Engineer)
    query = (
        sess.query(Company)
        .filter_by(company_id=1)
        .options(subqueryload(Company.employees.of_type(target))))
    eq_(query.all(), [self._company_with_emps_fixture()[0]])
def test_join_base_to_sub(self):
    """Filtering on a subclass attribute of the polymorphic selectable
    should execute in a single statement."""
    sess = create_session()
    pa = with_polymorphic(Person, [Engineer])

    def go():
        result = sess.query(pa).filter(
            pa.Engineer.primary_language == 'java').all()
        eq_(result, self._emps_wo_relationships_fixture()[0:1])

    self.assert_sql_count(testing.db, go, 1)
def list_anamnesis(patient_id, appointment_id=None):
    """Render the anamnesis page for a patient: their anamnesis entries
    (all subtypes), their doctor, survey-choice forms and attached files.

    :param patient_id: the patient whose record is displayed.
    :param appointment_id: optional appointment to pre-select.
    """
    # Only medical staff may view anamnesis data.
    authorized_roles = [constants.ROLE_DENTIST, constants.ROLE_NURSE,
                        constants.ROLE_ASSISTANT]
    if session["role"] not in authorized_roles:
        return redirect(url_for("index"))
    survey_form = ChooseSurveyForm(request.form)
    survey_form.survey_id.choices = meta.session.query(
        anamnesis.Survey.id, anamnesis.Survey.name).all()
    patient, appointment = checks.get_patient_appointment(
        patient_id, appointment_id)
    # Load every Anamnesis subtype in one polymorphic query.
    global_anamnesis = with_polymorphic(anamnesis.Anamnesis, "*")
    patient_anamnesis = (
        meta.session.query(global_anamnesis)
        .filter(anamnesis.Anamnesis.patient_id == patient_id)
        .order_by(
            anamnesis.Anamnesis.alert.desc(),
            anamnesis.Anamnesis.anamnesis_type,
            anamnesis.MedicalHistory.type,
            anamnesis.MedicalHistory.disease,
            anamnesis.Anamnesis.time_stamp,
        )
        .all()
    )
    doctor = meta.session.query(md.MedecineDoctor).filter(
        md.MedecineDoctor.id == patient.gen_doc_id).one_or_none()
    patient_survey_form = PatientSurveyForm(request.form)
    # Offer only appointments that actually have anamnesis entries.
    patient_survey_form.appointment_id.choices = [
        (appointment.id, appointment.agenda.starttime)
        for appointment in meta.session.query(schedule.Appointment)
        .filter(
            schedule.Appointment.patient_id == patient.id,
            schedule.Appointment.id.in_(
                [anamnesis_entry.appointment_id
                 for anamnesis_entry in patient_anamnesis]),
        )
        .join(schedule.Agenda)
        .order_by(schedule.Agenda.starttime.desc())
        .all()
    ]
    anamnesis_files = (
        meta.session.query(documents.Files)
        .filter(documents.Files.id.in_(
            [anamn.file_id for anamn in patient_anamnesis]))
        .order_by(documents.Files.timestamp.desc())
        .all()
    )
    return render_template(
        "patient_anamnesis.html",
        patient=patient,
        appointment=appointment,
        patient_anamnesis=patient_anamnesis,
        doctor=doctor,
        survey_form=survey_form,
        constants=constants,
        patient_survey_form=patient_survey_form,
        anamnesis_files=anamnesis_files,
    )
def hosts_delete(context, host_id):
    """Delete an existing host."""
    session = get_session()
    with session.begin():
        host_devices = with_polymorphic(models.Device, '*')
        query = model_query(
            context, host_devices, session=session, project_only=True)
        query.filter_by(id=host_id).delete()
def netdevices_delete(context, netdevice_id):
    """Delete existing network device."""
    session = get_session()
    with session.begin():
        device = with_polymorphic(models.Device, '*')
        (model_query(context, device, session=session, project_only=True)
            .filter_by(type='net_devices')
            .filter_by(id=netdevice_id)
            .delete())
def hosts_get_by_id(context, host_id):
    """Get details for the host with given id.

    :raises exceptions.NotFound: if no host matches *host_id*
    :raises exceptions.UnknownException: on any other query failure
    """
    host_devices = with_polymorphic(models.Device, '*')
    query = model_query(context, host_devices, project_only=True).\
        filter_by(id=host_id)
    try:
        result = query.one()
        # Fix: lazy %-style logging args instead of eager "%" formatting,
        # so the message is only built when the level is enabled.
        LOG.info("Result by host id %s", result)
    except sa_exc.NoResultFound:
        LOG.error("No result found for host with id %s", host_id)
        raise exceptions.NotFound()
    except Exception as err:
        raise exceptions.UnknownException(message=err)
    return result
def lugares(cls, session, search=None):
    """Return all Lugar instances, loaded polymorphically across every
    subtype; with *search*, filter by a case-insensitive name regex while
    excluding Catedra rows."""
    lugares = with_polymorphic(Lugar, [
        Direccion, Escuela, LugarDictado, Secretaria,
        Instituto, Prosecretaria, Maestria, Catedra
    ])
    if not search:
        q = session.query(lugares)
    else:
        # TODO (translated from original note): unclear how to drop the
        # Catedra subclass from the query; explore other options. For now
        # it is excluded by requiring the Catedra id to be NULL.
        q = session.query(lugares).filter(
            lugares.Catedra.id == None,  # noqa: E711
            lugares.nombre.op('~*')(search))
    return q.all()
def __init__(self, request, limit=5, tmpl='recent_posts'):
    """Collect the most recently added published documents and events."""
    super().__init__(request)
    self.template = self.template.format(tmpl)
    entity = orm.with_polymorphic(Content, [Document, Event])
    filters = sql.and_(
        entity.filter_published(),
        ContentType.name.in_(['document', 'event']))
    query = (
        self.dbsession.query(entity)
        .join(entity.type)
        .options(orm.contains_eager(entity.type))
        .filter(filters)
        .order_by(entity.added.desc())
        .limit(limit))
    self.posts = query.all()
def test_with_poly_sub(self):
    """Coercing a path against a polymorphic alias resolves to the most
    specific sub-entity for a subclass attribute."""
    Person = _poly_fixtures.Person
    Engineer = _poly_fixtures.Engineer
    emapper = inspect(Engineer)
    p_poly = with_polymorphic(Person, [Engineer])
    e_poly = inspect(p_poly.Engineer)
    p_poly = inspect(p_poly)
    path = PathRegistry.coerce((p_poly, emapper.attrs.machines))
    # polymorphic AliasedClass - the path uses _entity_for_mapper()
    # to get the most specific sub-entity
    eq_(path.path, (e_poly, emapper.attrs.machines))
def check_reset_token(self, email, token):
    """Return True iff *token* matches the newest unexpired reset token
    belonging to the user with *email*."""
    base_user_and_user = with_polymorphic(BaseUser, User)
    latest = (
        ResetToken.query
        .filter(ResetToken.expire > utcnow())
        .join(base_user_and_user)
        .filter(base_user_and_user.User.email == email)
        .order_by(ResetToken.creation_date.desc())
        .first())
    if not latest:
        return False
    # NOTE(review): plain equality is not constant-time; consider
    # hmac.compare_digest for comparing secret tokens.
    return latest.token == token
def execute_scheduled_tasks():
    """For all tasks which are due, call their respective implementation
    and handle the result. Implementations are given by `task_type_to_impl`.

    Errors are reported to the creator via `send_user_send_mail`.
    """
    # Load polymorphically so each row comes back as its concrete subclass.
    task_and_subtypes = with_polymorphic(Task, "*")
    tasks = (session.session.query(task_and_subtypes).filter(
        task_and_subtypes.status == TaskStatus.OPEN,
        task_and_subtypes.due <= session.utcnow()).all())
    print(f"executing {len(tasks)} scheduled tasks")
    for task in tasks:
        # Start each task from a clean session in case a previous
        # iteration left it broken.
        repair_session()
        task_impl = get_task_implementation(task)
        try:
            task_impl.execute(task)
        except Exception as e:
            # Record the failure and repair the (possibly dirty) session.
            task_impl.errors.append(str(e))
            repair_session()
        if task_impl.new_status is not None:
            task.status = task_impl.new_status
        if task_impl.errors:
            task.errors = task_impl.errors
            for error in task.errors:
                print(f"Error while executing task: {error}")
        try:
            write_task_message(
                task,
                f"Processed {task.type.name} task. Status: {task.status.name}",
                log=True)
        except ObjectDeletedError:
            # The task row vanished mid-processing; skip committing it.
            logger.error("Task instance deleted (broken polymorphism?)",
                         exc_info=True)
            continue
        session.session.commit()
        if task.status == TaskStatus.FAILED:
            send_template_mails(['*****@*****.**'],
                                TaskFailedTemplate(), task=task)
def delete():
    ''' Deletes the profile identified by the ``id`` query argument and
    returns its JSON representation. '''
    # Renamed local from ``id`` to avoid shadowing the builtin.
    profile_id = request.args.get('id', type=int)
    entities = with_polymorphic(Profile, '*')
    profile = db.session().query(entities).filter_by(id=profile_id).first()
    # NOTE(review): if no profile matches, ``profile`` is None and the
    # delete below raises; returning a 404 may be preferable — confirm.
    db.session().delete(profile)
    db.session().commit()
    resp = jsonify(profile.json_dict())
    return resp
def new_screener_question(org_id, project_id):
    """Create the single allowed screener question for a project.

    Renders the creation form; on valid submission stores the question
    and redirects to the project details page.
    """
    # 404s unless the organisation belongs to the current user.
    org = (Organisation.query.filter_by(user_id=current_user.id).filter_by(
        id=org_id).first_or_404())
    project = (db.session.query(Project).filter_by(
        user_id=current_user.id).filter(Project.id == project_id).first())
    # A project may carry at most one screener question.
    screener_questions_poly = with_polymorphic(Question, [ScreenerQuestion])
    question = (db.session.query(screener_questions_poly).filter_by(
        user_id=current_user.id).filter_by(project_id=project_id).filter_by(
            organisation_id=org_id).first())
    if question is not None:
        flash("Not allowed! You can only add one screener question.", "error")
        return redirect(url_for("project.index", org_id=org_id))
    # Fix: removed an unused Question query whose result was never read.
    form = AddScreenerQuestionForm()
    if form.validate_on_submit():
        appt = ScreenerQuestion(
            project_id=project.id,
            title=form.title.data,
            description=form.description.data,
            required_answer=form.required_answer.data,
            answer_option_one=form.answer_option_one.data,
            answer_option_two=form.answer_option_two.data,
            answer_option_three=form.answer_option_three.data,
            question_type=QuestionTypes.ScreenerQuestion.value,
            user_id=current_user.id,
        )
        db.session.add(appt)
        db.session.commit()
        # Fix: the trailing ``.format(appt.title)`` was a no-op (the string
        # has no placeholder); removed without changing the flashed text.
        flash("Successfully created", "form-success")
        return redirect(
            url_for(
                "project.project_details",
                org_id=org_id,
                project_id=project_id,
                name=appt.title,
            ))
    else:
        # NOTE(review): this also fires on plain GET requests (form not
        # submitted), which flashes an error before the form is shown.
        flash("ERROR! Data was not added.", "error")
    return render_template(
        "question/create_screener_question.html",
        form=form,
        project_id=project_id,
        project_name=project.name,
        org_id=org_id,
    )
def test_col_expression_base_plus_two_subs(self):
    """Select base and two-subclass columns together and filter across
    both subclasses."""
    sess = create_session()
    pa = with_polymorphic(Person, [Engineer, Manager])
    query = (
        sess.query(
            pa.name, pa.Engineer.primary_language, pa.Manager.manager_name)
        .filter(or_(
            pa.Engineer.primary_language == 'java',
            pa.Manager.manager_name == 'dogbert'))
        .order_by(pa.Engineer.type))
    eq_(
        query.all(),
        [
            ('dilbert', 'java', None),
            ('dogbert', None, 'dogbert'),
        ],
    )
def hosts_update(context, host_id, values):
    """Update an existing host.

    :raises exceptions.BadRequest: if *values* contains an invalid parent id
    """
    session = get_session()
    with session.begin():
        host_devices = with_polymorphic(models.Device, '*')
        query = model_query(context, host_devices, session=session,
                            project_only=True)
        query = query.filter_by(id=host_id)
        host_ref = query.with_for_update().one()
        try:
            host_ref.update(values)
        except exceptions.ParentIDError as e:
            # Consistency fix: translate invalid parent ids into a 400,
            # matching the sibling hosts_update implementation.
            raise exceptions.BadRequest(message=str(e))
        host_ref.save(session)
        return host_ref
def search():
    """Search teachers whose name contains the POSTed search term."""
    if request.method == 'POST':
        term = request.form['search_term']
        if term == "":
            flash("Enter a name to search for")
            return redirect('/')
        # Fix: removed an unused ``with_polymorphic(User, [Teacher])``
        # result and stale commented-out code — the query below already
        # targets Teacher directly.
        results = db.session.query(Teacher).filter(
            Teacher.name.contains(term)).all()
        if not results:
            flash("No teachers found with that name.")
            return redirect('/')
        return render_template('search_results.html', results=results)
    else:
        return redirect(url_for('main.index'))
def netdevices_update(context, netdevice_id, values):
    """Update existing network device"""
    session = get_session()
    with session.begin():
        device = with_polymorphic(models.Device, '*')
        netdevice_ref = (
            model_query(context, device, session=session, project_only=True)
            .filter_by(type='net_devices')
            .filter_by(id=netdevice_id)
            .with_for_update()
            .one())
        netdevice_ref.update(values)
        netdevice_ref.save(session)
        return netdevice_ref
def test_with_poly_base(self):
    """Coercing a base-class attribute path against a polymorphic alias
    keeps the base entity, not the subclass."""
    Person = _poly_fixtures.Person
    Engineer = _poly_fixtures.Engineer
    pmapper = inspect(Person)
    emapper = inspect(Engineer)
    p_poly = inspect(with_polymorphic(Person, [Engineer]))
    # "name" is actually on Person, not Engineer
    path = PathRegistry.coerce((p_poly, emapper.attrs.name))
    # polymorphic AliasedClass - because "name" is on Person,
    # we get Person, not Engineer
    eq_(path.path, (p_poly, pmapper.attrs.name))
def hosts_update(context, host_id, values):
    """Update an existing host."""
    session = get_session()
    with session.begin():
        # Load Device polymorphically so subclass rows come back fully.
        entity = with_polymorphic(models.Device, '*')
        host_ref = (
            model_query(context, entity, session=session, project_only=True)
            .filter_by(id=host_id)
            .with_for_update()
            .one()
        )
        try:
            host_ref.update(values)
        except exceptions.ParentIDError as e:
            # Translate constraint violations into a client-facing error.
            raise exceptions.BadRequest(message=str(e))
        host_ref.save(session)
        return host_ref
def test_with_polymorphic_join_compile_one(self):
    """JOIN through of_type(with_polymorphic(...)) compiles to the
    expected polymorphic join target."""
    sess = Session()
    wp = with_polymorphic(
        Person, [Engineer, Manager], aliased=True, flat=True)
    query = sess.query(Company).join(Company.employees.of_type(wp))
    expected = (
        "SELECT companies.company_id AS companies_company_id, "
        "companies.name AS companies_name FROM companies "
        "JOIN %s" % (self._polymorphic_join_target([Engineer, Manager]))
    )
    self.assert_compile(query, expected)
def update():
    """Update a profile from the posted JSON and return it as JSON."""
    payload = request.get_json()
    entities = with_polymorphic(Profile, '*')
    profile = db.session().query(entities).filter_by(
        id=payload['id']).first()
    profile.update_from_dict(payload)
    db.session().commit()
    # Re-fetch so the response reflects the committed state.
    profile = db.session().query(entities).filter_by(id=profile.id).first()
    return jsonify(profile.json_dict())
def test_options_of_type(self):
    """joinedload options built with of_type() serialize the same path
    before and after a pickle round trip."""
    wp = with_polymorphic(Person, [Engineer, Manager], flat=True)
    cases = [
        (
            sa.orm.joinedload(Company.employees.of_type(Engineer)),
            [(Company, "employees", Engineer)],
        ),
        (
            sa.orm.joinedload(Company.employees.of_type(wp)),
            [(Company, "employees", None)],
        ),
    ]
    for opt, serialized in cases:
        restored = pickle.loads(pickle.dumps(opt))
        eq_(opt.__getstate__()["path"], serialized)
        eq_(restored.__getstate__()["path"], serialized)
def test_with_poly_use_mapper(self):
    """With _use_mapper_path, the polymorphic AliasedClass behaves just
    like the base mapper for path coercion."""
    Person = _poly_fixtures.Person
    Engineer = _poly_fixtures.Engineer
    engineer_mapper = inspect(Engineer)
    poly = inspect(
        with_polymorphic(Person, [Engineer], _use_mapper_path=True))
    path = PathRegistry.coerce((poly, engineer_mapper.attrs.machines))
    # The path is keyed on the mapper itself, not the aliased entity.
    eq_(path.path, (engineer_mapper, engineer_mapper.attrs.machines))
def network_devices_update(context, network_device_id, values):
    """Update existing network device"""
    session = get_session()
    with session.begin():
        # Polymorphic load so subclass columns are populated.
        entity = with_polymorphic(models.Device, '*')
        network_device_ref = (
            model_query(context, entity, session=session, project_only=True)
            .filter_by(type='network_devices')
            .filter_by(id=network_device_id)
            .with_for_update()
            .one()
        )
        try:
            network_device_ref.update(values)
        except exceptions.ParentIDError as e:
            # Map integrity problems onto a client-visible error.
            raise exceptions.BadRequest(message=str(e))
        network_device_ref.save(session)
        return network_device_ref
def get_voter_ids_query(self, start=None, end=None):
    """Return a query of distinct voter ids for this idea's vote specs,
    optionally bounded by vote date."""
    from .votes import AbstractIdeaVote
    spec_ids = []
    for proposal in self.idea.get_vote_proposals():
        spec_ids.extend(spec.id for spec in proposal.criterion_for)
    vote_class = with_polymorphic(AbstractIdeaVote, AbstractIdeaVote)
    query = (
        self.db.query(vote_class.voter_id)
        .filter_by(tombstone_date=None)
        .filter(vote_class.vote_spec_id.in_(spec_ids))
        .distinct()
    )
    if start is not None:
        query = query.filter(vote_class.vote_date >= start)
    if end is not None:
        query = query.filter(vote_class.vote_date <= end)
    return query
def test_join_wpoly_innerjoin(self):
    """with_polymorphic(innerjoin=True) renders an inner JOIN between
    job and subjob (no alias, no outer join)."""
    # NOTE: the original also unpacked self.classes.ParentThing, which
    # was never used; dropped.
    DataContainer = self.classes.DataContainer
    Job = self.classes.Job
    SubJob = self.classes.SubJob
    Job_P = with_polymorphic(Job, SubJob, innerjoin=True)
    s = Session()
    q = s.query(DataContainer).join(DataContainer.jobs.of_type(Job_P))
    self.assert_compile(
        q,
        "SELECT data_container.id AS data_container_id, "
        "data_container.name AS data_container_name "
        "FROM data_container JOIN "
        "(job JOIN subjob ON job.id = subjob.id) "
        "ON data_container.id = job.container_id")
def load_from_ext_id(cls, external_id, session):
    """Find the object in the database

    :param external_id: the id to look for
    :param session: the session to use
    :rtype: cls
    :return: the object that corresponds to the external_id
    :raise: IrmaDatabaseResultNotFound, IrmaDatabaseError
    """
    polymorphic_entity = with_polymorphic(FileExt, [cls])
    try:
        return (
            session.query(polymorphic_entity)
            .filter(cls.external_id == str(external_id))
            .one()
        )
    except NoResultFound as e:
        raise IrmaDatabaseResultNotFound(e)
    except MultipleResultsFound as e:
        raise IrmaDatabaseError(e)
def fulltext(self, query, types='*', limit=None):
    """Full-text search over Content rows (filtered via filter_published()).

    :param query: plain-language search string (fed to plainto_tsquery).
    :param types: content-type names to restrict to, or '*' for all.
    :param limit: optional maximum number of rows.
    :return: ``search_result(search_query, count)`` — the query carries
        two extra highlighted columns, ``hl_title`` and ``hl_descr``.
    """
    # Base query
    search_for = orm.with_polymorphic(Content, types)
    search_query = self.dbsession.query(search_for)
    # Transform query to a ts_query
    q_ts = sql.func.plainto_tsquery(query)
    # Default string to highlight results (for ts_headline)
    hl_sel = "StartSel='<span class=\"search_hl\">', StopSel=</span>"
    # Highlight title and descriptions columns (through the ts_headline()
    # function)
    hl_title = sql.func.ts_headline(Content.title, q_ts, hl_sel)
    hl_descr = sql.func.ts_headline(Content.description, q_ts, hl_sel)
    # Where clause
    filters = sql.and_(
        search_for.filter_published(),
        q_ts.op('@@')(Content.fts),
        Content.is_fts
    )
    if types != '*':
        ids = polymorphic_ids(search_for, types)
        # NOTE(review): mutates the and_() clause in place via
        # ClauseList.append — deprecated/removed in newer SQLAlchemy;
        # verify against the pinned version before upgrading.
        filters.append(search_for.content_type_id.in_(ids))
    search_query = search_query.filter(filters)
    # Count how much rows we have (before adding the extra columns)
    count = search_query.count()
    # Add the two highlighted columns
    search_query = search_query.add_columns(
        hl_title.label('hl_title'),
        hl_descr.label('hl_descr')
    )
    # Order by ts_query match (relevance-ish ordering)
    search_query = search_query.order_by(
        q_ts.op('@@')(Content.fts)
    )
    if limit:
        search_query = search_query.limit(limit)
    return search_result(search_query, count)
def get_num_votes(self, start=None, end=None):
    """Count votes on this idea's vote specs, optionally date-bounded."""
    from .votes import AbstractIdeaVote
    spec_ids = []
    for proposal in self.idea.get_vote_proposals():
        spec_ids.extend(spec.id for spec in proposal.criterion_for)
    vote_class = with_polymorphic(AbstractIdeaVote, AbstractIdeaVote)
    query = (
        self.db.query(vote_class.voter_id)
        .filter_by(tombstone_date=None)
        .filter(vote_class.vote_spec_id.in_(spec_ids))
    )
    if start is not None:
        query = query.filter(vote_class.vote_date >= start)
    if end is not None:
        query = query.filter(vote_class.vote_date <= end)
    # There is no distinct on purpose here.
    # For a token vote spec, voting on two categories is counted as 2 votes.
    return query.count()
def __update_media(self, attrs):
    """Apply ``attrs`` to the persisted media row and the local cache.

    Silently ignores the update when the row no longer exists (e.g. it
    was deleted concurrently).
    """
    session = Session()
    # Query Media polymorphically so Song/Video subclasses load fully.
    video_or_song = with_polymorphic(Media, [Song, Video])
    # Fix: use the bound `session` consistently instead of mixing it
    # with the class-level `Session.query` scoped proxy.
    m_obj = (session.query(video_or_song)
             .filter(Media.m_id == self.media["m_id"])
             .one_or_none())
    if m_obj is None:
        # Lazy %-args: the message is only formatted if DEBUG is enabled.
        log.debug('media with id %d is gone, was probably deleted. '
                  'Ignoring media update.', self.media["m_id"])
        return
    for key in attrs:
        setattr(m_obj, key, attrs[key])
        self.media[key] = attrs[key]
    session.commit()
    Session.remove()
def test_joinedload_wpoly(self):
    """joinedload of_type(with_polymorphic) eager-loads everything in a
    single SELECT."""
    # NOTE: the original also unpacked self.classes.ParentThing, which
    # was never used; dropped.
    DataContainer = self.classes.DataContainer
    Job = self.classes.Job
    SubJob = self.classes.SubJob
    Job_P = with_polymorphic(Job, SubJob, aliased=True)
    s = Session(testing.db)
    q = s.query(DataContainer).\
        options(joinedload(DataContainer.jobs.of_type(Job_P)))

    def go():
        eq_(q.all(), self._dc_fixture())
    # One query total: the collection is eagerly joined.
    self.assert_sql_count(testing.db, go, 1)
def test_contains_eager_wpoly(self):
    """contains_eager over an explicit polymorphic join populates the
    collections but still lazy-loads per-row (5 statements)."""
    DataContainer = self.classes.DataContainer
    Job = self.classes.Job
    SubJob = self.classes.SubJob
    job_poly = with_polymorphic(Job, SubJob, aliased=True)
    sess = Session(testing.db)
    relation = DataContainer.jobs.of_type(job_poly)
    q = (sess.query(DataContainer)
         .join(relation)
         .options(contains_eager(relation)))

    def go():
        eq_(q.all(), self._dc_fixture())
    self.assert_sql_count(testing.db, go, 5)
def test_join_explicit_wpoly_flat(self):
    """Explicit join to a flat with_polymorphic compiles to an aliased
    job/subjob LEFT OUTER JOIN."""
    # NOTE: the original also unpacked self.classes.ParentThing, which
    # was never used; dropped.
    DataContainer = self.classes.DataContainer
    Job = self.classes.Job
    SubJob = self.classes.SubJob
    Job_P = with_polymorphic(Job, SubJob, flat=True)
    s = Session()
    q = s.query(DataContainer).join(Job_P, DataContainer.jobs)
    self.assert_compile(
        q,
        "SELECT data_container.id AS data_container_id, "
        "data_container.name AS data_container_name "
        "FROM data_container JOIN "
        "(job AS job_1 LEFT OUTER JOIN subjob AS subjob_1 "
        "ON job_1.id = subjob_1.id) "
        "ON data_container.id = job_1.container_id")
def test_load_only_subclass_from_relationship_polymorphic(self):
    """load_only on a polymorphic of_type target restricts the SELECT to
    the named Manager columns plus primary key / discriminator."""
    sess = Session()
    mgr_poly = with_polymorphic(Person, [Manager], flat=True)
    relation = Company.employees.of_type(mgr_poly)
    q = sess.query(Company).join(relation).options(
        contains_eager(relation).load_only(
            mgr_poly.Manager.status, mgr_poly.Manager.manager_name))
    self.assert_compile(
        q,
        "SELECT people_1.person_id AS people_1_person_id, "
        "people_1.type AS people_1_type, "
        "managers_1.person_id AS managers_1_person_id, "
        "managers_1.status AS managers_1_status, "
        "managers_1.manager_name AS managers_1_manager_name, "
        "companies.company_id AS companies_company_id, "
        "companies.name AS companies_name "
        "FROM companies JOIN (people AS people_1 LEFT OUTER JOIN "
        "managers AS managers_1 ON people_1.person_id = "
        "managers_1.person_id) ON companies.company_id = "
        "people_1.company_id")
def test_col_expression_base_plus_two_subs(self):
    """Base column plus two subclass columns through one polymorphic
    entity, with an OR filter spanning both subclasses."""
    sess = create_session()
    pa = with_polymorphic(Person, [Engineer, Manager])
    criteria = or_(
        pa.Engineer.primary_language == "java",
        pa.Manager.manager_name == "dogbert",
    )
    q = sess.query(
        pa.name, pa.Engineer.primary_language, pa.Manager.manager_name
    ).filter(criteria).order_by(pa.Engineer.type)
    eq_(q.all(),
        [("dilbert", "java", None), ("dogbert", None, "dogbert")])