Example #1
    def rbacsForRightSQL(self, rightId, sql=None):
        '''
        @see: IRbacService.rbacsForRightSQL
        '''
        child, parent = aliased(RoleNode), aliased(RoleNode)

        subq = sql or self.session().query(RbacMapped)
        subq = subq.join(RbacRole, RbacRole.rbac == RbacMapped.Id)
        subq = subq.join(parent, parent.role == RbacRole.role)
        subq = subq.join(
            child, and_(child.left >= parent.left,
                        child.right <= parent.right))
        subq = subq.join(RbacRight, RbacRight.rbac == child.role)
        subq = subq.join(
            RightMapped,
            and_(RightMapped.Id == RbacRight.right, RightMapped.Id == rightId))

        sql = sql or self.session().query(RbacMapped)
        sql = sql.join(RbacRight, RbacRight.rbac == RbacMapped.Id)
        sql = sql.join(
            RightMapped,
            and_(RightMapped.Id == RbacRight.right, RightMapped.Id == rightId))

        sql = sql.union(subq).distinct(RbacMapped.Id).order_by(RbacMapped.Id)

        return sql
Example #2
    def get_data(self):
        logging.debug("These queries will take a few mins to run.")

        budget_query = self.session.query(
            self.models.MovieInfo.movie_id,
            func.max(
                cast(
                    func.replace(
                        func.replace(self.models.MovieInfo.info, ",", ""), "$",
                        ""), types.Numeric)).label('budget')).filter(
                            self.models.MovieInfo.movie_id.in_(self.movie_ids),
                            self.models.MovieInfo.info_type_id.in_(
                                [BUDGET_TYPE_ID]),
                            self.models.MovieInfo.info.like('$%'),
                        ).group_by(self.models.MovieInfo.movie_id).subquery()

        year_query = self.session.query(
            self.models.MovieInfo.movie_id,
            func.min(func.substr(
                self.models.MovieInfo.info, -4)).label('release_year')).filter(
                    self.models.MovieInfo.movie_id.in_(self.movie_ids),
                    self.models.MovieInfo.info_type_id.in_([RELEASE_DATES_ID]),
                    self.models.MovieInfo.info.like('USA:%')).group_by(
                        self.models.MovieInfo.movie_id).subquery()

        budget_alias = aliased(self.models.MovieInfo, budget_query)
        year_alias = aliased(self.models.MovieInfo, year_query)

        return self.session.query(
            budget_query.columns.movie_id, budget_query.columns.budget,
            year_query.columns.release_year).join(
                year_alias,
                year_alias.movie_id == budget_alias.movie_id).distinct(
                    self.models.MovieInfo.movie_id).filter(
                        self.models.MovieInfo.movie_id.in_(self.movie_ids), )
Example #3
def search_user_checkpoints(user_obj, search_term):
    """
    Searches both my UserCheckpoints and my friends' with a literal search term.
    Searches a Checkpoint's name, description, and User's (Facebook) name (union)
    """
    from db import UserCheckpoint, db, FacebookUser, User, FriendConnection, Checkpoint
    search_term_with_wildcard = "%"+search_term+"%"
    
    # query that contains my own and my friends' UserCheckpoints
    FriendUserCheckpoint, FriendFacebookUser, FriendUser = aliased(UserCheckpoint), aliased(FacebookUser), aliased(User)
    ucp = (db.session.query(UserCheckpoint).
           join(Checkpoint, Checkpoint.id == UserCheckpoint.checkpoint_id).
           join(FriendUser, FriendUser.id == UserCheckpoint.user_id).
           join(FriendFacebookUser, FriendFacebookUser.id == FriendUser.facebook_user_id).
           join(FriendConnection, FriendConnection.fb_user_to == FriendFacebookUser.id).
           join(FacebookUser, FacebookUser.id == FriendConnection.fb_user_from).
           join(User, User.facebook_user_id == FacebookUser.id).
           filter(or_(User.id == user_obj.id, FriendUser.id == user_obj.id)).
           filter(Checkpoint.demo == False)        
           )
    
    name_search = ucp.filter(Checkpoint.name.ilike(search_term_with_wildcard))
    desc_search = ucp.filter(Checkpoint.description.ilike(search_term_with_wildcard))
    fb_name_search = ucp.filter(FriendFacebookUser.name.ilike(search_term_with_wildcard))
    
    result = name_search.union(desc_search).union(fb_name_search)
    
    return result.all()
Example #4
    def get_for_kontkurs_id(self, kontkurs_id, sem=1, subgroup_number=0):
        Kontgrp2 = aliased(Kontgrp)
        KontgrpCommon = aliased(Kontgrp)
        KontgrpParent = aliased(Kontgrp)

        if not isinstance(kontkurs_id, list):
            kontkurs_id = [kontkurs_id, ]

        if int(subgroup_number):
            filter_ = and_(or_(
                Raspnagr.kontkurs_id.in_(kontkurs_id),
                Kontlist.kontkurs_id.in_(kontkurs_id),
                KontgrpCommon.kont_id.in_(kontkurs_id),
            ), or_(
                KontgrpCommon.id == None,
                and_(KontgrpParent.id == None, KontgrpCommon.ngroup == subgroup_number),
                KontgrpParent.ngroup == subgroup_number
            ), )
        else:
            filter_ = or_(
                Raspnagr.kontkurs_id.in_(kontkurs_id),
                KontgrpCommon.kont_id.in_(kontkurs_id),
                Kontlist.kontkurs_id.in_(kontkurs_id),
            )

        raspnagrs = Raspnagr.query.filter(
            Raspnagr.id.in_(Raspnagr.query
                            .outerjoin(Kontgrp, Raspnagr.kontgrp_id == Kontgrp.id)
                            .outerjoin(Kontgrplist, Kontgrplist.op == Raspnagr.op)
                            .outerjoin(Kontlist, Kontlist.op == Raspnagr.op)
                            .outerjoin(Kontgrp2, Kontgrp2.id == Kontgrplist.kontgrp_id)
                            .outerjoin(KontgrpCommon, func.coalesce(Kontgrp2.id, Kontgrp.id) == KontgrpCommon.id)
                            .outerjoin(KontgrpParent, KontgrpCommon.parent_id == KontgrpParent.id)
                            .filter(filter_).filter(Raspnagr.sem == sem).with_entities(Raspnagr.id)))
        return raspnagrs
Example #5
def unwanted_list(request):
    who = aliased(User, name="who")
    by = aliased(User, name="by")
    a = DBSession.query(UnWanted, who, by). \
        join(who, UnWanted.user).join(by, UnWanted.by_user). \
        all()
    a = obj_list_join(a, ['', 'user_', 'by_'])
    return {'unwanted': a}
Example #6
    def my_members(self):
        """Returns a dictionary from ("User"|"Group", "name") tuples to records."""

        parent = aliased(Group)
        group_member = aliased(Group)
        user_member = aliased(User)

        now = datetime.utcnow()

        users = (self.session.query(
            label("id", user_member.id),
            label("type", literal("User")),
            label("name", user_member.username),
            label("role", GroupEdge._role),
            label("edge_id", GroupEdge.id),
            label("expiration", GroupEdge.expiration),
        ).filter(
            parent.id == self.id,
            parent.id == GroupEdge.group_id,
            user_member.id == GroupEdge.member_pk,
            GroupEdge.active == True,
            parent.enabled == True,
            user_member.enabled == True,
            or_(GroupEdge.expiration > now, GroupEdge.expiration == None),
            GroupEdge.member_type == 0,
        ).group_by("type", "name").subquery())

        groups = (self.session.query(
            label("id", group_member.id),
            label("type", literal("Group")),
            label("name", group_member.groupname),
            label("role", GroupEdge._role),
            label("edge_id", GroupEdge.id),
            label("expiration", GroupEdge.expiration),
        ).filter(
            parent.id == self.id,
            parent.id == GroupEdge.group_id,
            group_member.id == GroupEdge.member_pk,
            GroupEdge.active == True,
            parent.enabled == True,
            group_member.enabled == True,
            or_(GroupEdge.expiration > now, GroupEdge.expiration == None),
            GroupEdge.member_type == 1,
        ).subquery())

        query = (self.session.query("id", "type", "name", "role", "edge_id",
                                    "expiration").select_entity_from(
                                        union_all(users.select(),
                                                  groups.select())).order_by(
                                                      desc("role"),
                                                      desc("type")))

        return OrderedDict(
            ((record.type, record.name), record) for record in query.all())
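The two subqueries above are shaped into identical column lists with label() and a constant literal() type tag so that user rows and group rows can be combined. A reduced, hedged sketch of that shaping, assuming hypothetical Person and Team models rather than the Group/User/GroupEdge models used here:

from sqlalchemy import Column, Integer, String, create_engine, literal, union_all
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()

class Person(Base):
    __tablename__ = "person"
    id = Column(Integer, primary_key=True)
    username = Column(String)

class Team(Base):
    __tablename__ = "team"
    id = Column(Integer, primary_key=True)
    groupname = Column(String)

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add_all([Person(username="alice"), Team(groupname="ops")])
    session.flush()

    # Tag each branch with a constant "type" column so the union lines up.
    people = session.query(literal("User").label("type"),
                           Person.username.label("name"))
    teams = session.query(literal("Group").label("type"),
                          Team.groupname.label("name"))
    members = union_all(people.subquery().select(), teams.subquery().select())
    print(session.execute(members).fetchall())  # [('User', 'alice'), ('Group', 'ops')]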
Example #7
def runs_cst(session, time_since=None, run_ids=None, run_status=None):
    """
    :param sqlalchemy.orm.Session session:
    :param datetime time_since: date cutoff for a run to be retrieved
    :param list run_ids: filter by specific run ids
    :param list run_status: filter by specific run statuses
    """
    r = None
    s = None
    if run_ids:
        r = session.query(t.Process.processid) \
            .join(t.Process.type) \
            .join(t.Process.udfs) \
            .filter(t.ProcessUdfView.udfname == 'RunID') \
            .filter(t.ProcessUdfView.udfvalue.in_(run_ids)) \
            .filter(t.ProcessType.displayname == 'AUTOMATED - Sequence').subquery('r')

    if run_status:
        s = session.query(t.Process.processid) \
            .join(t.Process.type) \
            .join(t.Process.udfs) \
            .filter(t.ProcessUdfView.udfname == 'Run Status') \
            .filter(t.ProcessUdfView.udfvalue.in_(run_status)) \
            .filter(t.ProcessType.displayname == 'AUTOMATED - Sequence').subquery('s')

    parent_process_io_tracker = aliased(t.ProcessIOTracker)

    gparent_output_mapping = aliased(t.OutputMapping)
    gparent_process_io_tracker = aliased(t.ProcessIOTracker)
    gparent_process = aliased(t.Process)
    gparent_process_type = aliased(t.ProcessType)

    q = session.query(t.Process.processid, gparent_process.processid, gparent_process.daterun) \
        .distinct(gparent_process.processid) \
        .join(t.Process.type) \
        .join(t.Process.processiotrackers) \
        .join(t.ProcessIOTracker.artifact) \
        .join(t.OutputMapping,            t.OutputMapping.outputartifactid == t.Artifact.artifactid) \
        .join(parent_process_io_tracker,  t.OutputMapping.trackerid == parent_process_io_tracker.trackerid) \
        .join(gparent_output_mapping,     gparent_output_mapping.outputartifactid == parent_process_io_tracker.inputartifactid) \
        .join(gparent_process_io_tracker, gparent_output_mapping.trackerid == gparent_process_io_tracker.trackerid) \
        .join(gparent_process, gparent_process.processid == gparent_process_io_tracker.processid) \
        .join(gparent_process_type,       gparent_process.typeid == gparent_process_type.typeid) \
        .filter(t.ProcessType.displayname == 'AUTOMATED - Sequence')

    if time_since:
        q = q.filter(func.date(t.Process.createddate) > func.date(time_since))
    if r is not None:
        q = q.filter(t.Process.processid.in_(r))
    if s is not None:
        q = q.filter(t.Process.processid.in_(s))

    results = q.all()
    return results
Example #8
    def rolesForRbacSQL(self, rbacId, sql=None):
        """
        @see: IRbacService.rolesForRbacSQL
        """
        child, parent = aliased(RoleNode), aliased(RoleNode)

        sql = sql or self.session().query(RoleMapped)
        sql = sql.join(child, child.role == RoleMapped.Id)
        sql = sql.join(parent, and_(child.left >= parent.left, child.right <= parent.right))
        sql = sql.join(RbacRole, and_(RbacRole.role == parent.role, RbacRole.rbac == rbacId))

        return sql
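The child/parent pair of RoleNode aliases above performs a nested-set containment join: parent matches every node whose left/right interval encloses the child's interval. A minimal sketch of that join in isolation, assuming a hypothetical Node model (with lft/rgt columns) instead of the RBAC mappings:

from sqlalchemy import Column, Integer, and_, create_engine
from sqlalchemy.orm import Session, aliased, declarative_base

Base = declarative_base()

class Node(Base):
    # Hypothetical nested-set table; "lft"/"rgt" stand in for RoleNode.left/right.
    __tablename__ = "node"
    id = Column(Integer, primary_key=True)
    left = Column("lft", Integer, nullable=False)
    right = Column("rgt", Integer, nullable=False)

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    child, parent = aliased(Node), aliased(Node)
    # Ancestors of node 42: every row whose interval contains the child's interval.
    ancestors = (
        session.query(parent)
        .join(child, and_(child.left >= parent.left, child.right <= parent.right))
        .filter(child.id == 42)
    )
    print(ancestors.all())  # empty here; no rows were inserted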
Example #9
    def rolesForRbacSQL(self, rbacId, sql=None):
        '''
        @see: IRbacService.rolesForRbacSQL
        '''
        child, parent = aliased(RoleNode), aliased(RoleNode)

        sql = sql or self.session().query(RoleMapped)
        sql = sql.join(child, child.role == RoleMapped.Id)
        sql = sql.join(parent, and_(child.left >= parent.left, child.right <= parent.right))
        sql = sql.join(RbacRole, and_(RbacRole.role == parent.role, RbacRole.rbac == rbacId))
        sql = sql.order_by(child.left)

        return sql
Example #10
    def get_for_kontkurs_id(cls, kontkurs_id, sem, subgroup_numbers=None):
        if subgroup_numbers is None:
            subgroup_numbers = []

        Kontgrp2 = aliased(Kontgrp)
        KontgrpCommon = aliased(Kontgrp)
        KontgrpParent = aliased(Kontgrp)
        KontgrpChild = aliased(Kontgrp)

        if len(subgroup_numbers) and subgroup_numbers[0] != '0':
            conditions_common = []
            conditions_parent = []
            for item in subgroup_numbers:
                c = item.split('-')
                if len(c) > 1:
                    conditions_common.append(and_(KontgrpCommon.ngroup == c[0], KontgrpChild.ngroup == c[1]))
                    conditions_parent.append(and_(KontgrpParent.ngroup == c[0], KontgrpCommon.ngroup == c[1]))
                else:
                    conditions_common.append(KontgrpCommon.ngroup == c[0])
                    conditions_parent.append(KontgrpParent.ngroup == c[0])

            filter_ = and_(or_(
                Raspnagr.kontkurs_id == kontkurs_id,
                Kontlist.kontkurs_id == kontkurs_id,
                KontgrpCommon.kont_id == kontkurs_id,
            ), or_(
                KontgrpCommon.id == None,
                and_(KontgrpParent.id == None, or_(*conditions_common)),
                or_(*conditions_parent)
            ))
        else:
            filter_ = func.coalesce(Kontlist.kontkurs_id, KontgrpCommon.kont_id, Raspnagr.kontkurs_id) == kontkurs_id

        zaoch = RaspisZaoch.query.with_entities(RaspisZaoch.id) \
            .outerjoin(Raspnagr, Raspnagr.id == RaspisZaoch.raspnagr_id) \
            .outerjoin(Kontlist, Kontlist.op == Raspnagr.op) \
            .outerjoin(Kontgrplist, Kontgrplist.op == Raspnagr.op) \
            .outerjoin(Kontgrp, Kontgrp.id == Kontgrplist.kontgrp_id) \
            .outerjoin(Kontgrp2, Raspnagr.kontgrp_id == Kontgrp2.id) \
            .outerjoin(KontgrpCommon, func.coalesce(Kontgrp.id, Kontgrp2.id) == KontgrpCommon.id) \
            .outerjoin(KontgrpParent, KontgrpCommon.parent_id == KontgrpParent.id) \
            .outerjoin(KontgrpChild, KontgrpChild.parent_id == KontgrpCommon.id) \
            .filter(filter_) \
            .filter(Raspnagr.sem == sem)

        result = RaspisZaoch.query.filter(
            RaspisZaoch.id.in_(zaoch)
        )

        return result
Example #11
def set_recent_outings(waypoint, lang):
    """Set last 10 outings on routes associated to the given waypoint.
    """
    t_outing_route = aliased(Association, name='a1')
    t_route_wp = aliased(Association, name='a2')
    with_query_waypoints = _get_select_children(waypoint)

    recent_outing_ids = get_first_column(
        DBSession.query(Outing.document_id).
        filter(Outing.redirects_to.is_(None)).
        join(
            t_outing_route,
            Outing.document_id == t_outing_route.child_document_id).
        join(
            t_route_wp,
            and_(
                t_route_wp.child_document_id ==
                t_outing_route.parent_document_id,
                t_route_wp.child_document_type == ROUTE_TYPE,
            )).
        join(
            with_query_waypoints,
            with_query_waypoints.c.document_id == t_route_wp.parent_document_id
        ).
        order_by(Outing.date_end.desc()).
        limit(NUM_RECENT_OUTINGS).
        all())

    total = DBSession.query(Outing.document_id). \
        filter(Outing.redirects_to.is_(None)). \
        join(
            t_outing_route,
            Outing.document_id == t_outing_route.child_document_id). \
        join(
            t_route_wp,
            and_(
                t_route_wp.child_document_id ==
                t_outing_route.parent_document_id,
                t_route_wp.child_document_type == ROUTE_TYPE,
            )). \
        join(
            with_query_waypoints,
            with_query_waypoints.c.document_id == t_route_wp.parent_document_id
        ). \
        count()

    waypoint.associations['recent_outings'] = get_documents_for_ids(
        recent_outing_ids, lang, outing_documents_config, total)
Example #12
    def my_members(self):
        # type: () -> Mapping[Tuple[str, str], Any]
        """Returns a dictionary from ("User"|"Group", "name") tuples to records."""

        parent = aliased(Group)
        group_member = aliased(Group)
        user_member = aliased(User)

        now = datetime.utcnow()

        users = (self.session.query(
            label("id", user_member.id),
            label("type", literal("User")),
            label("name", user_member.username),
            label("role", GroupEdge._role),
            label("edge_id", GroupEdge.id),
            label("expiration", GroupEdge.expiration),
        ).filter(
            parent.id == self.id,
            parent.id == GroupEdge.group_id,
            user_member.id == GroupEdge.member_pk,
            GroupEdge.active == True,
            parent.enabled == True,
            user_member.enabled == True,
            or_(GroupEdge.expiration > now, GroupEdge.expiration == None),
            GroupEdge.member_type == 0,
        ).order_by(desc("role"), "name"))

        groups = (self.session.query(
            label("id", group_member.id),
            label("type", literal("Group")),
            label("name", group_member.groupname),
            label("role", GroupEdge._role),
            label("edge_id", GroupEdge.id),
            label("expiration", GroupEdge.expiration),
        ).filter(
            parent.id == self.id,
            parent.id == GroupEdge.group_id,
            group_member.id == GroupEdge.member_pk,
            GroupEdge.active == True,
            parent.enabled == True,
            group_member.enabled == True,
            or_(GroupEdge.expiration > now, GroupEdge.expiration == None),
            GroupEdge.member_type == 1,
        ).order_by(desc("role"), "name"))

        return OrderedDict(
            ((r.type, r.name), r) for r in itertools.chain(users, groups))
Example #13
def getContacts(displayName='Krishna', phoneNum='8427434777'):
    '''
    Fetch list of contacts for a user
    {
      "displayName": "Manmohan",
      "phoneNum": "9023051078",
      "user_id": "4"
    }
    '''

    req_json = request.get_json()
    #displayName = req_json['displayName']
    #phoneNum = req_json['phoneNum']
    sender_id = req_json['sender_id']

    auth_header = request.headers.get('Authorization')
    sub = jwtUtil.parse_decode_auth_token(auth_header, SECRETKEY)
    if ('INVALID' == sub):
        return json.dumps({'status': 'Logged Out'})

    session = Session()
    u = aliased(Users)

    contact_objects = session.query(u.user_id, u.display_name, u.callback_url, u.country_phone_code, u.create_date, u.phone_number).\
    filter(u.contact_id == sender_id).all()
    schema = UserSchema(many=True)
    contactList = schema.dump(contact_objects)

    session.close()
    return json.dumps(contactList.data)
Example #14
def date(date, latest=False):
    try:
        if isinstance(date, (str, unicode)):
            date = datetime.strptime(date, "%Y-%m-%d")

        if isinstance(date, datetime):
            date = date.date()

    except:
        abort(404)

    pilot_alias = aliased(User, name='pilot')

    columns = {
        0: (Flight, 'index_score'),
        1: (pilot_alias, 'name'),
        2: (Flight, 'olc_classic_distance'),
        3: (Airport, 'name'),
        4: (Club, 'name'),
        5: (AircraftModel, 'name'),
        6: (Flight, 'takeoff_time'),
        7: (Flight, 'id'),
        8: (Flight, 'num_comments'),
    }

    return _create_list('latest' if latest else 'date',
                        request.args,
                        date=date,
                        columns=columns)
Example #15
 def __init__(self, Acl, AclAccess, Compensate, signatures=None):
     '''
     Construct the compensate service alchemy.
     
     @param AclAccess: class of WithAclAccess
         The ACL access relation mapped class.
     @param Compensate: class of WithCompensate
         The compensate relation mapped class.
     @param signatures: dictionary{string: string|callable(identifier) -> string}
         A dictionary containing as keys the signatures that will be injected and as a value either the marker to be injected
         or a callable that takes the identifier as a parameter and provides the marker string value.
     '''
     assert isinstance(Acl, MappedSupport), 'Invalid mapped class %s' % Acl
     assert issubclass(AclAccess, WithAclAccess), 'Invalid acl access class %s' % AclAccess
     assert issubclass(Compensate, WithCompensate), 'Invalid compensate class %s' % Compensate
     if __debug__:
         if signatures is not None:
             assert isinstance(signatures, dict), 'Invalid fill in signatures %s' % signatures
             for signature, marker in signatures.items():
                 assert isinstance(signature, str), 'Invalid signature %s' % signature
                 assert isinstance(marker, str) or callable(marker), 'Invalid marker %s' % marker
     
     self.Acl = Acl
     self.AclIdentifier = modelId(Acl)
     self.AclAccess = AclAccess
     self.AliasAclAccess = aliased(AclAccess)
     self.Compensate = Compensate
     
     self.signatures = signatures
Example #16
def date(date, latest=False):
    try:
        if isinstance(date, (str, unicode)):
            date = datetime.strptime(date, "%Y-%m-%d")

        if isinstance(date, datetime):
            date = date.date()

    except:
        abort(404)

    pilot_alias = aliased(User, name='pilot')

    columns = {
        0: (Flight, 'index_score'),
        1: (pilot_alias, 'name'),
        2: (Flight, 'olc_classic_distance'),
        3: (Airport, 'name'),
        4: (Club, 'name'),
        5: (AircraftModel, 'name'),
        6: (Flight, 'takeoff_time'),
        7: (Flight, 'id'),
        8: (Flight, 'num_comments'),
    }

    return _create_list(
        'latest' if latest else 'date',
        request.args, date=date, columns=columns)
Example #17
    def date(self, date, **kw):
        try:
            if isinstance(date, str):
                date = datetime.strptime(date, "%Y-%m-%d")

            if isinstance(date, datetime):
                date = date.date()
        except:
            raise HTTPNotFound

        pilot_alias = aliased(User, name='pilot')

        columns = {
            0: (Flight, 'index_score'),
            1: (pilot_alias, 'display_name'),
            2: (Flight, 'olc_classic_distance'),
            3: (Airport, 'name'),
            4: (Club, 'name'),
            5: (AircraftModel, 'name'),
            6: (Flight, 'takeoff_time'),
            7: (Flight, 'id'),
            8: (Flight, 'num_comments'),
        }

        if kw.get('today', False):
            return self.__do_list('today', kw, date=date, columns=columns)
        else:
            return self.__do_list('date', kw, date=date, columns=columns)
Example #18
 def rank(self):
     inner_team = aliased(Team)
     return (DBSession.query(func.count('*') + 1).
             select_from(inner_team).
             filter(inner_team.score > Team.score).
             correlate(Team).
             label('rank'))
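The property above builds a correlated scalar subquery labelled 'rank' (one plus the number of teams with a higher score). A hedged sketch of how such a label is typically consumed, assuming a minimal Team model with id and score columns:

from sqlalchemy import Column, Integer, create_engine, func
from sqlalchemy.orm import Session, aliased, declarative_base

Base = declarative_base()

class Team(Base):
    __tablename__ = "team"
    id = Column(Integer, primary_key=True)
    score = Column(Integer, nullable=False)

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add_all([Team(score=10), Team(score=30), Team(score=20)])
    session.flush()

    inner_team = aliased(Team)
    # Count teams with a strictly higher score, correlated to the outer Team row.
    rank = (
        session.query(func.count("*") + 1)
        .select_from(inner_team)
        .filter(inner_team.score > Team.score)
        .correlate(Team)
        .scalar_subquery()
        .label("rank")
    )
    for team, r in session.query(Team, rank).order_by(rank):
        print(team.score, r)  # 30 1 / 20 2 / 10 3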
Example #19
def synchronize_stocks() -> None:
    # An alias for the provider table, and an explicit "on" clause for the join, are mandatory because
    # the VenueProvider model already has a "select" on the provider table for its polymorphic query
    provider_alias = aliased(Provider)

    venue_providers = (
        VenueProvider.query.join(provider_alias, provider_alias.id == VenueProvider.providerId)
        .filter(provider_alias.apiUrl.isnot(None))
        .filter(provider_alias.isActive.is_(True))
        .filter(VenueProvider.isActive.is_(True))
        .all()
    )

    for venue_provider in venue_providers:
        # We need to store these values in variables to prevent a crash
        # if the session is broken and we need to log them
        venue_id = venue_provider.venueId
        venue_provider_id = venue_provider.id
        venue_id_at_offer_provider = venue_provider.venueIdAtOfferProvider
        provider_name = venue_provider.provider.name

        try:
            synchronize_provider_api.synchronize_venue_provider(venue_provider)
        except Exception as exc:  # pylint: disable=broad-except
            logger.exception("Could not synchronize venue_provider=%s: %s", venue_provider_id, exc)
            notion_connector.add_to_synchronization_error_database(
                exception=exc,
                provider_name=provider_name,
                venue_id=venue_id,
                venue_id_at_offer_provider=venue_id_at_offer_provider,
            )
            db.session.rollback()
Example #20
    def getOriginalSource(self, source):
        originalSource = aliased(SourceMapped, name="original_source")

        sql = self.session().query(originalSource)
        sql = sql.join(SourceMapped, originalSource.URI == SourceMapped.OriginURI)
        sql = sql.filter(SourceMapped.Id == source)

        return sql.one().Id
Example #21
    def get_data(self):
        logging.debug("These queries will take a few mins to run.")

        budget_query = self.session.query(
            self.models.MovieInfo.movie_id,
            func.max(
                cast(
                    func.replace(
                        func.replace(self.models.MovieInfo.info, ",", ""),
                        "$", ""),
                    types.Numeric)
            ).label('budget')
        ).filter(
            self.models.MovieInfo.movie_id.in_(self.movie_ids),
            self.models.MovieInfo.info_type_id.in_([BUDGET_TYPE_ID]),
            self.models.MovieInfo.info.like('$%'),
        ).group_by(self.models.MovieInfo.movie_id
        ).subquery()

        year_query = self.session.query(
            self.models.MovieInfo.movie_id,
            func.min(
                func.substr(self.models.MovieInfo.info, -4)
            ).label('release_year')
        ).filter(
            self.models.MovieInfo.movie_id.in_(self.movie_ids),
            self.models.MovieInfo.info_type_id.in_([RELEASE_DATES_ID]),
            self.models.MovieInfo.info.like('USA:%')
        ).group_by(self.models.MovieInfo.movie_id
        ).subquery()

        budget_alias = aliased(self.models.MovieInfo, budget_query)
        year_alias = aliased(self.models.MovieInfo, year_query)

        return self.session.query(
            budget_query.columns.movie_id,
            budget_query.columns.budget,
            year_query.columns.release_year
        ).join(
            year_alias, year_alias.movie_id == budget_alias.movie_id
        ).distinct(
            self.models.MovieInfo.movie_id
        ).filter(
            self.models.MovieInfo.movie_id.in_(self.movie_ids),
        )
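Both versions of get_data pass a subquery as the second argument to aliased(), which maps the ORM class onto that derived table so its attributes can be used in join conditions. A minimal sketch of the same pattern, assuming a hypothetical Item model instead of MovieInfo:

from sqlalchemy import Column, Integer, create_engine
from sqlalchemy.orm import Session, aliased, declarative_base

Base = declarative_base()

class Item(Base):
    __tablename__ = "item"
    id = Column(Integer, primary_key=True)
    group_id = Column(Integer, nullable=False)
    value = Column(Integer, nullable=False)

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add_all([Item(group_id=1, value=5), Item(group_id=1, value=9)])
    session.flush()

    # Derived table of "big" items, then an Item entity mapped onto it.
    big_items = session.query(Item).filter(Item.value > 6).subquery()
    big_item = aliased(Item, big_items)

    # Join ordinary rows to the derived table through the aliased entity,
    # the way the example joins budget_alias/year_alias on movie_id.
    pairs = session.query(Item.value, big_item.value).join(
        big_item, big_item.group_id == Item.group_id)
    print(pairs.all())  # [(5, 9), (9, 9)]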
Example #22
    def getUnpublishedBySource(self, sourceId, thumbSize=None, offset=None, limit=None, detailed=False, q=None):
        '''
        @see: IBlogPostService.getUnpublished
        '''
        assert q is None or isinstance(q, QBlogPostUnpublished), 'Invalid query %s' % q
        
        postVerification = aliased(PostVerificationMapped, name='post_verification_filter')
              
        sql = self.session().query(BlogPostMapped)
        sql = sql.filter(BlogPostMapped.Feed == sourceId)
        
        deleted = False
        if q:
            if QBlogPostUnpublished.isDeleted in q:
                deleted = q.isDeleted.value                
            sql = buildQuery(sql, q, BlogPostMapped)
        
        if q:
            if QWithCId.search in q:
                all = self._processLike(q.search.ilike) if q.search.ilike is not None else self._processLike(q.search.like)
                sql = sql.filter(or_(BlogPostMapped.Meta.ilike(all), BlogPostMapped.CreatedOn.ilike(all), \
                                     BlogPostMapped.Content.ilike(all), BlogPostMapped.ContentPlain.ilike(all), \
                                     ))
                
            if QWithCId.status in q or QWithCId.checker in q:
                sql = sql.join(postVerification, postVerification.Id == BlogPostMapped.Id)     
                sql = sql.join(VerificationStatusMapped, VerificationStatusMapped.id == postVerification.statusId) 
                
            if QWithCId.status in q: 
                sql = sql.filter(VerificationStatusMapped.Key == q.status.equal) 
                
            if QWithCId.checker in q: 
                sql = sql.filter(postVerification.Checker == q.checker.equal)
                
            if (QWithCId.cId not in q) or (QWithCId.cId in q and QWithCId.cId.start not in q \
               and QWithCId.cId.end not in q and QWithCId.cId.since not in q and QWithCId.cId.until not in q):
                
                sql = sql.filter(BlogPostMapped.PublishedOn == None) 
                if deleted: sql = sql.filter(BlogPostMapped.DeletedOn != None)
                else: sql = sql.filter(BlogPostMapped.DeletedOn == None)
                
        else: sql = sql.filter((BlogPostMapped.PublishedOn == None) & (BlogPostMapped.DeletedOn == None))     
                                   
        sql = sql.order_by(desc_op(BlogPostMapped.Order))
        sqlLimit = buildLimits(sql, offset, limit)
 
        posts = self._addImages(self._trimPosts(sqlLimit.distinct(), deleted= not deleted, unpublished=False, published=True), thumbSize)
        if detailed:
            posts = IterPost(posts, sql.distinct().count(), offset, limit)
            
            lastCidSql = self.session().query(func.MAX(BlogPostMapped.CId))
            lastCidSql = lastCidSql.join(CollaboratorMapped, BlogPostMapped.Creator == CollaboratorMapped.User)
            lastCidSql = lastCidSql.filter(CollaboratorMapped.Source == sourceId)
            
            posts.lastCId = lastCidSql.scalar()
            
        return posts
Example #23
 def __init__(self, db="", sql_logging=False, schema=None):
     if db == "" or db is None:
         # In-memory SQLite
         connect_url = "sqlite:///"
     if "://" in db:
         # User has provided a full path
         connect_url = db
     else:
         # Assume a SQLite file
         connect_url = "sqlite:///%s" % db
     engine = sqlalchemy.create_engine(connect_url, echo=sql_logging)
     self._orm = _Orm(engine, schema=schema)
     Session = sessionmaker(bind=engine)
     self._session = Session()
     self._stoptime1 = aliased(StopTime, name="first_stop_time")
     self._stoptime2 = aliased(StopTime, name="second_stop_time")
     self._transfer_fromstop = aliased(Stop, name="tr_from_stop")
     self._transfer_tostop = aliased(Stop, name="tr_to_stop")
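The paired StopTime aliases above are the usual setup for a self-join. A hedged sketch of how such a pair can be joined, assuming a hypothetical GTFS-style StopTime model (trip_id, stop_id, stop_sequence) rather than this project's mappings:

from sqlalchemy import Column, Integer, String, and_, create_engine
from sqlalchemy.orm import Session, aliased, declarative_base

Base = declarative_base()

class StopTime(Base):
    __tablename__ = "stop_times"
    id = Column(Integer, primary_key=True)
    trip_id = Column(String, nullable=False)
    stop_id = Column(String, nullable=False)
    stop_sequence = Column(Integer, nullable=False)

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add_all([
        StopTime(trip_id="T1", stop_id="A", stop_sequence=1),
        StopTime(trip_id="T1", stop_id="B", stop_sequence=2),
    ])
    session.flush()

    first_stop_time = aliased(StopTime, name="first_stop_time")
    second_stop_time = aliased(StopTime, name="second_stop_time")
    # Pairs of stop times on the same trip where the second comes later.
    pairs = (
        session.query(first_stop_time.stop_id, second_stop_time.stop_id)
        .join(
            second_stop_time,
            and_(
                second_stop_time.trip_id == first_stop_time.trip_id,
                second_stop_time.stop_sequence > first_stop_time.stop_sequence,
            ),
        )
    )
    print(pairs.all())  # [('A', 'B')]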
Example #24
    def getOriginalSource(self, source):
        originalSource = aliased(SourceMapped, name='original_source')

        sql = self.session().query(originalSource)
        sql = sql.join(SourceMapped,
                       originalSource.URI == SourceMapped.OriginURI)
        sql = sql.filter(SourceMapped.Id == source)

        return sql.one().Id
Example #25
 def __init__(self, db="", sql_logging=False, schema=None):
     if db == "" or db is None:
         # In-memory SQLite
         connect_url = "sqlite:///"
     if "://" in db:
         # User has provided a full path
         connect_url = db
     else:
         # Assume a SQLite file
         connect_url = "sqlite:///%s" % db
     engine = sqlalchemy.create_engine(connect_url, echo=sql_logging)
     self._orm = _Orm(engine, schema=schema)
     Session = sessionmaker(bind=engine)
     self._session = Session()
     self._stoptime1 = aliased(StopTime, name="first_stop_time")
     self._stoptime2 = aliased(StopTime, name="second_stop_time")
     self._transfer_fromstop = aliased(Stop, name="tr_from_stop")
     self._transfer_tostop = aliased(Stop, name="tr_to_stop")
Example #26
    def rightsForRbacSQL(self, rbacId, sql=None):
        """
        @see: IRbacService.rightsForRbacSQL
        """
        child, parent = aliased(RoleNode), aliased(RoleNode)

        subq = sql or self.session().query(RightMapped)
        subq = subq.join(RbacRight, RbacRight.right == RightMapped.Id)
        subq = subq.join(child, child.role == RbacRight.rbac)
        subq = subq.join(parent, and_(child.left >= parent.left, child.right <= parent.right))
        subq = subq.join(RbacRole, and_(RbacRole.role == parent.role, RbacRole.rbac == rbacId))

        sql = sql or self.session().query(RightMapped)
        sql = sql.join(RbacRight, and_(RbacRight.right == RightMapped.Id, RbacRight.rbac == rbacId))

        sql = sql.union(subq).distinct(RightMapped.Id).order_by(RightMapped.Id)

        return sql
Example #27
def set_recent_outings(waypoint, lang):
    """Set last 10 outings on routes associated to the given waypoint.
    """
    t_outing_route = aliased(Association, name="a1")
    t_route_wp = aliased(Association, name="a2")
    with_query_waypoints = _get_select_children(waypoint)

    recent_outing_ids = get_first_column(
        DBSession.query(Outing.document_id)
        .filter(Outing.redirects_to.is_(None))
        .join(t_outing_route, Outing.document_id == t_outing_route.child_document_id)
        .join(
            t_route_wp,
            and_(
                t_route_wp.child_document_id == t_outing_route.parent_document_id,
                t_route_wp.child_document_type == ROUTE_TYPE,
            ),
        )
        .join(with_query_waypoints, with_query_waypoints.c.document_id == t_route_wp.parent_document_id)
        .order_by(Outing.date_end.desc())
        .limit(NUM_RECENT_OUTINGS)
        .all()
    )

    total = (
        DBSession.query(Outing.document_id)
        .filter(Outing.redirects_to.is_(None))
        .join(t_outing_route, Outing.document_id == t_outing_route.child_document_id)
        .join(
            t_route_wp,
            and_(
                t_route_wp.child_document_id == t_outing_route.parent_document_id,
                t_route_wp.child_document_type == ROUTE_TYPE,
            ),
        )
        .join(with_query_waypoints, with_query_waypoints.c.document_id == t_route_wp.parent_document_id)
        .count()
    )

    waypoint.associations["recent_outings"] = get_documents_for_ids(
        recent_outing_ids, lang, outing_documents_config, total
    )
Example #28
        def filter_query(query):
            t_outing_route = aliased(Association, name='a1')
            t_route_wp = aliased(Association, name='a2')
            t_route = aliased(Document, name='r')

            return query. \
                join(
                    t_outing_route,
                    Outing.document_id == t_outing_route.child_document_id). \
                join(
                    t_route,
                    and_(
                        t_outing_route.parent_document_id ==
                        t_route.document_id,
                        t_route.type == ROUTE_TYPE)). \
                join(
                    t_route_wp,
                    and_(
                        t_route_wp.parent_document_id == waypoint_id,
                        t_route_wp.child_document_id == t_route.document_id))
Example #29
        def filter_query(query):
            t_outing_route = aliased(Association, name='a1')
            t_route_wp = aliased(Association, name='a2')
            t_route = aliased(Document, name='r')

            return query. \
                join(
                    t_outing_route,
                    Outing.document_id == t_outing_route.child_document_id). \
                join(
                    t_route,
                    and_(
                        t_outing_route.parent_document_id ==
                        t_route.document_id,
                        t_route.type == ROUTE_TYPE)). \
                join(
                    t_route_wp,
                    and_(
                        t_route_wp.parent_document_id == waypoint_id,
                        t_route_wp.child_document_id == t_route.document_id))
Example #30
def artifact_reagent_labels(session, artifacts):
    """
    Retrieve all artifacts' ancestors with their samples and reagent labels.
    Only the ones that occur once are an accurate representation of the sample/reagent label relationship.
    """
    # second artifact table to join artifacts with their ancestors
    ancestors_artifact = aliased(t.Artifact)
    q = session.query(t.Artifact.artifactid, ancestors_artifact.luid, t.ReagentLabel.name, t.Sample.name) \
        .join(ancestors_artifact, t.Artifact.ancestors) \
        .join(ancestors_artifact.reagentlabels) \
        .join(ancestors_artifact.samples)
    q = q.filter(t.Artifact.artifactid.in_(artifacts))
    return q.all()
Example #31
def get_recent_friend_user_checkpoints(user_obj, limit = None):
    """
    (faux notification) returns Checkpoints that were recently created by friends
    """
    if limit == None:
        limit = 8
    
    from db import UserCheckpoint, db, FacebookUser, User, FriendConnection, Checkpoint
    
    FriendUserCheckpoint, FriendFacebookUser, FriendUser = aliased(UserCheckpoint), aliased(FacebookUser), aliased(User)
    q = (db.session.query(UserCheckpoint).
         join(Checkpoint, Checkpoint.id == UserCheckpoint.checkpoint_id).
         join(FriendUser, FriendUser.id == UserCheckpoint.user_id).
         join(FriendFacebookUser, FriendFacebookUser.id == FriendUser.facebook_user_id).
         join(FriendConnection, FriendConnection.fb_user_to == FriendFacebookUser.id).
         join(FacebookUser, FacebookUser.id == FriendConnection.fb_user_from).
         join(User, User.facebook_user_id == FacebookUser.id).
         filter(User.id == user_obj.id).
         filter(Checkpoint.creator == FriendUser.id).
         filter(Checkpoint.demo == False)
         )
    
    return q.order_by(desc(Checkpoint.date_created)).limit(limit).all()
Example #32
    def get_for_kontkurs(cls, kontkurs, subgroup_number, *args, **kwargs):
        # Kontgrp2 = aliased(Kontgrp)
        #
        Kontkurs2 = aliased(Kontkurs)
        Kontgrp2 = aliased(Kontgrp)
        KontgrpCommon = aliased(Kontgrp)
        KontgrpParent = aliased(Kontgrp)

        if int(subgroup_number):
            filter_ = and_(or_(
                Raspnagr.kontkurs_id == kontkurs.id,
                Kontlist.kontkurs_id == kontkurs.id,
                KontgrpCommon.kont_id == kontkurs.id,
            ), or_(
                KontgrpCommon.id == None,
                and_(KontgrpParent.id == None, KontgrpCommon.ngroup == subgroup_number),
                KontgrpParent.ngroup == subgroup_number
            ), )
        else:
            filter_ = or_(
                Raspnagr.kontkurs_id == kontkurs.id,
                KontgrpCommon.kont_id == kontkurs.id,
                Kontkurs2.id == kontkurs.id,
            )

        raspis = Raspis.query \
            .join(Raspnagr) \
            .outerjoin(Kontgrp, Raspnagr.kontgrp_id == Kontgrp.id) \
            .outerjoin(Kontgrplist, Kontgrplist.op == Raspnagr.op) \
            .outerjoin(Kontlist, Kontlist.op == Raspnagr.op) \
            .outerjoin(Kontkurs2, Kontkurs2.id == Kontlist.kontkurs_id) \
            .outerjoin(Kontgrp2, Kontgrp2.id == Kontgrplist.kontgrp_id) \
            .outerjoin(KontgrpCommon, func.coalesce(Kontgrp2.id, Kontgrp.id) == KontgrpCommon.id) \
            .outerjoin(KontgrpParent, KontgrpCommon.parent_id == KontgrpParent.id) \
            .filter(filter_)

        return cls._get_table(raspis, *args, **kwargs)
Example #33
def get_friends(user_obj, exclude_self=None):
    """
    Returns a list of <<User>> objects; if exclude_self is True, the current user is not
    included as a friend.
    """
    if exclude_self == None:
        exclude_self = False
    
    from db import db, User, FriendConnection, FacebookUser
    
    FriendFacebookUser, FriendUser = aliased(FacebookUser), aliased(User)
    friends_q = (db.session.query(User, FriendUser).
               join(FacebookUser, FacebookUser.id == User.facebook_user_id).
               join(FriendConnection, FriendConnection.fb_user_from == FacebookUser.id).
               join(FriendFacebookUser, FriendConnection.fb_user_to == FriendFacebookUser.id).
               join(FriendUser, FriendUser.facebook_user_id == FriendFacebookUser.id)
               )
    friends_q = friends_q.filter(User.id == user_obj.id)
    
    friends = [f[1] for f in friends_q.all()]
    if not exclude_self:
        friends += [user_obj]
    
    return friends
Example #34
def getCountries():
    '''
    fetch all country data
    '''
    # fetching from the database
    session = Session()
    c = aliased(Country)
    country_objects = session.query(c.iso, c.name, c.phonecode).all()

    # transforming into JSON-serializable objects
    schema = CountrySchema(many=True)
    countryList = schema.dump(country_objects)

    # serializing as JSON
    session.close()
    return json.dumps(countryList.data)
Example #35
def club(id):
    club = get_requested_record(Club, id)
    pilot_alias = aliased(User, name='pilot')

    columns = {
        0: (Flight, 'date_local'),
        1: (Flight, 'index_score'),
        2: (pilot_alias, 'name'),
        3: (Flight, 'olc_classic_distance'),
        4: (Airport, 'name'),
        5: (AircraftModel, 'name'),
        6: (Flight, 'takeoff_time'),
        7: (Flight, 'id'),
        8: (Flight, 'num_comments'),
    }

    return _create_list('club', request.args, club=club, columns=columns)
Example #36
    def airport(self, id, **kw):
        airport = get_requested_record(Airport, id)
        pilot_alias = aliased(User, name='pilot')

        columns = {
            0: (Flight, 'date_local'),
            1: (Flight, 'index_score'),
            2: (pilot_alias, 'display_name'),
            3: (Flight, 'olc_classic_distance'),
            4: (Club, 'name'),
            5: (AircraftModel, 'name'),
            6: (Flight, 'takeoff_time'),
            7: (Flight, 'id'),
            8: (Flight, 'num_comments'),
        }

        return self.__do_list('airport', kw, airport=airport, columns=columns)
Example #37
def club(id):
    club = get_requested_record(Club, id)
    pilot_alias = aliased(User, name='pilot')

    columns = {
        0: (Flight, 'date_local'),
        1: (Flight, 'index_score'),
        2: (pilot_alias, 'name'),
        3: (Flight, 'olc_classic_distance'),
        4: (Airport, 'name'),
        5: (AircraftModel, 'name'),
        6: (Flight, 'takeoff_time'),
        7: (Flight, 'id'),
        8: (Flight, 'num_comments'),
    }

    return _create_list('club', request.args, club=club, columns=columns)
Example #38
    def getUnpublished(self, blogId, typeId=None, creatorId=None, authorId=None, thumbSize=None, offset=None, limit=None,
                       detailed=False, q=None):
        '''
        @see: IBlogPostService.getUnpublished
        '''
        assert q is None or isinstance(q, QBlogPostUnpublished), 'Invalid query %s' % q
        
        postVerification = aliased(PostVerificationMapped, name='post_verification_filter')
        
        sql = self._filterQuery(blogId, typeId, creatorId, authorId, q)

        deleted = False
        if q:
            if QBlogPostUnpublished.isDeleted in q:
                deleted = q.isDeleted.value  
                
            if QWithCId.cId in q and q.cId:
                sql = sql.filter(BlogPostMapped.CId != None)
            sql = buildQuery(sql, q, BlogPostMapped)
            
            if QWithCId.status in q or QWithCId.checker in q:
                sql = sql.join(postVerification, postVerification.Id == BlogPostMapped.Id)     
                sql = sql.join(VerificationStatusMapped, VerificationStatusMapped.id == postVerification.statusId) 
                
            if QWithCId.status in q: 
                sql = sql.filter(VerificationStatusMapped.Key == q.status.equal) 
                
            if QWithCId.checker in q: 
                sql = sql.filter(postVerification.Checker == q.checker.equal)
                
            if QWithCId.cId not in q:
                sql = sql.filter(BlogPostMapped.PublishedOn == None) 
                if deleted: sql = sql.filter(BlogPostMapped.DeletedOn != None)
                else: sql = sql.filter(BlogPostMapped.DeletedOn == None)
                
        else: sql = sql.filter((BlogPostMapped.PublishedOn == None) & (BlogPostMapped.DeletedOn == None))    

        sql = sql.order_by(desc_op(BlogPostMapped.Order))
        
        sqlLimit = buildLimits(sql, offset, limit)
        posts = self._addImages(self._trimPosts(sqlLimit.distinct(), unpublished=False, published=True), thumbSize)
        if detailed:
            posts = IterPost(posts, sql.distinct().count(), offset, limit)
            posts.lastCId = self.session().query(func.MAX(BlogPostMapped.CId)).filter(BlogPostMapped.Blog == blogId).scalar()
        return posts
Example #39
def pilot(id):
    pilot = get_requested_record(User, id)
    pilot_alias = aliased(User, name='pilot')

    mark_flight_notifications_read(pilot)

    columns = {
        0: (Flight, 'date_local'),
        1: (Flight, 'index_score'),
        2: (pilot_alias, 'name'),
        3: (Flight, 'olc_classic_distance'),
        4: (Airport, 'name'),
        5: (AircraftModel, 'name'),
        6: (Flight, 'takeoff_time'),
        7: (Flight, 'id'),
        8: (Flight, 'num_comments'),
    }

    return _create_list('pilot', request.args, pilot=pilot, columns=columns)
Example #40
def step_info_with_output(session,
                          step_name,
                          project_name=None,
                          sample_name=None,
                          time_from=None,
                          time_to=None,
                          container_name=None,
                          artifact_udfs=None):
    """
    Get a join of output artifact UDFs for all steps of the provided name, filterable by
    project, sample name, container name or date range.
    """

    q = session.query(t.Process.luid, t.Process.daterun, t.ProcessIOTracker.inputartifactid,
                      t.Sample.name, t.Project.name, t.Artifact.luid)\
        .join(t.Process.type)\
        .join(t.Process.processiotrackers) \
        .join(t.ProcessIOTracker.artifact) \
        .join(t.Artifact.samples)\
        .join(t.Sample.project)\
        .filter(t.ProcessType.displayname == step_name)

    if artifact_udfs:
        output_artifact = aliased(t.Artifact)
        q = q.join(t.ProcessIOTracker.output)\
            .join(output_artifact, t.OutputMapping.outputartifactid == output_artifact.artifactid) \
            .join(output_artifact.udfs) \
            .filter(t.ArtifactUdfView.udfname.in_(artifact_udfs)) \
            .filter(t.ArtifactUdfView.udfvalue != None)
        q = q.add_columns(t.ArtifactUdfView.udfname,
                          t.ArtifactUdfView.udfvalue)

    if container_name:
        q = q.filter(t.Container.name == container_name)

    if time_from:
        q = q.filter(t.Process.daterun > func.date(time_from))

    if time_to:
        q = q.filter(t.Process.daterun < func.date(time_to))

    q = add_filters(q, project_name=project_name, sample_name=sample_name)
    return q.all()
Example #41
def pilot(id):
    pilot = get_requested_record(User, id)
    pilot_alias = aliased(User, name='pilot')

    mark_flight_notifications_read(pilot)

    columns = {
        0: (Flight, 'date_local'),
        1: (Flight, 'index_score'),
        2: (pilot_alias, 'name'),
        3: (Flight, 'olc_classic_distance'),
        4: (Airport, 'name'),
        5: (AircraftModel, 'name'),
        6: (Flight, 'takeoff_time'),
        7: (Flight, 'id'),
        8: (Flight, 'num_comments'),
    }

    return _create_list('pilot', request.args, pilot=pilot, columns=columns)
Example #42
File: topic.py Project: dbdoer/ioa
    async def find_topics(self, page: int):

        lastReplyAlias = aliased(UserTable)

        query = await self.connection.execute(
            select(
                [
                    TopicTable,
                    UserTable.c.username,
                    lastReplyAlias.c.username.label("last_replied_by_username"),
                ],
                TopicTable.c.deleted_at == None,
            )
            .select_from(
                TopicTable
                .outerjoin(UserTable, TopicTable.c.author_id == UserTable.c.id)
                .outerjoin(
                    lastReplyAlias,
                    TopicTable.c.last_replied_by_user_id == lastReplyAlias.c.id,
                )
            )
            .order_by(desc(TopicTable.c.last_replied_at))
            .limit(50)
            .offset((page - 1) * 50)
        )
        topics = await query.fetchall()
        return topics
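The aliased(UserTable) above is what lets this Core select join the user table twice: once for the topic author and once for the last replier. A hedged sketch of the same double join with hypothetical user/topic tables, written with the 1.4-style select():

from sqlalchemy import (Column, Integer, MetaData, String, Table,
                        create_engine, select)

metadata = MetaData()
user = Table(
    "user", metadata,
    Column("id", Integer, primary_key=True),
    Column("username", String),
)
topic = Table(
    "topic", metadata,
    Column("id", Integer, primary_key=True),
    Column("author_id", Integer),
    Column("last_replied_by_user_id", Integer),
)

engine = create_engine("sqlite://")
metadata.create_all(engine)

# Second reference to the same table, under its own name.
last_reply_user = user.alias("last_reply_user")

stmt = (
    select(topic, user.c.username,
           last_reply_user.c.username.label("last_replied_by_username"))
    .select_from(
        topic.outerjoin(user, topic.c.author_id == user.c.id)
             .outerjoin(last_reply_user,
                        topic.c.last_replied_by_user_id == last_reply_user.c.id)
    )
)
with engine.connect() as conn:
    print(conn.execute(stmt).fetchall())  # empty; no rows inserted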
Example #43
def add(money):
    #     sql = select([select([func.coalesce(func.sum(Transaction.money), 0) + money]).alias('tmp')])
    #     sql = select([func.sum(Transaction.money).label('g')])

    Tr = aliased(Transaction, name="tr")
    sql = select([func.coalesce(func.sum(Tr.balance), 0) + money])
    #     sql = sa_session.query(func.sum(Transaction.money))
    #     .as_scalar()
    #     .label('aa')
    #     print dir(sql)
    #     print 'sql', sql
    tr = Transaction(money=money)
    tr.balance = sql
    print "tr.created_at0", tr.created_at
    #     tr.moeny = 4294967295
    #     tr.balance = -1
    sa_session.add(tr)
    print "tr.created_at1", tr.created_at
    sa_session.commit()
    print "tr.created_at2", tr.created_at
Example #44
    def get_common_data(self):
        Auditory2 = aliased(Auditory)

        return RaspisZaoch.query \
            .outerjoin(Raspnagr, Raspnagr.id == RaspisZaoch.raspnagr_id) \
            .outerjoin(Auditory, RaspisZaoch.aud == Auditory.id) \
            .outerjoin(Auditory2, RaspisZaoch.aud2 == Auditory2.id) \
            .outerjoin(Teacher, Teacher.id == Raspnagr.prep_id) \
            .outerjoin(Discipline) \
            .outerjoin(Normtime) \
            .outerjoin(Kontgrp, Raspnagr.kontgrp_id == Kontgrp.id) \
            .outerjoin(Kontkurs, Raspnagr.kontkurs_id == Kontkurs.id) \
            .outerjoin(Potoklist, Raspnagr.op == Potoklist.op) \
            .order_by(RaspisZaoch.dt,
                      RaspisZaoch.para) \
            .with_entities(
                RaspisZaoch.id,
                RaspisZaoch.raspnagr_id,
                RaspisZaoch.dt,
                RaspisZaoch.para,
                RaspisZaoch.aud,
                RaspisZaoch.aud2,
                RaspisZaoch.kont_id,
                RaspisZaoch.kontgrp_id,
                RaspisZaoch.op_id,
                RaspisZaoch.type,
                RaspisZaoch.hours,
                Auditory.maxstud,
                Auditory2.maxstud.label('maxstud2'),
                Raspnagr.prep_id,
                Raspnagr.stud,
                Raspnagr.nt,
                func.rtrim(Normtime.title).label('normtime'),
                func.rtrim(Teacher.name).label("teacher"),
                func.rtrim(Auditory.title).label("auditory"),
                func.rtrim(Auditory2.title).label("auditory2"),
                func.rtrim(Discipline.title).label("discipline"),
                func.coalesce(Potoklist.title, Kontgrp.title, Kontkurs.title).label('konts')
            )
Example #45
def add_allocine_internal_ids() -> None:
    logger.info("Starting to update allocine pivots internal id")

    # Approx 900 rows so no performance issue to select all
    allocine_pivots: list[AllocinePivot] = AllocinePivot.query.all()
    logger.info("%d allocine pivots found", len(allocine_pivots))

    for allocine_pivot in allocine_pivots:
        decoded_id = base64.b64decode(allocine_pivot.theaterId).decode("ascii")
        internal_id = decoded_id.split("Theater:")[1]
        allocine_pivot.internalId = internal_id

    db.session.bulk_save_objects(allocine_pivots)
    db.session.commit()
    logger.info("Allocine pivots have been updated")

    logger.info("Starting to update allocine venue providers internal id")

    # Approx 48 rows so no performance issue to select all
    venue_alias = aliased(Venue)
    allocine_venue_providers_with_pivot: list[tuple[
        AllocineVenueProvider,
        AllocinePivot]] = (AllocineVenueProvider.query.join(
            venue_alias, venue_alias.id == AllocineVenueProvider.venueId).join(
                AllocinePivot,
                AllocinePivot.siret == venue_alias.siret).with_entities(
                    AllocineVenueProvider, AllocinePivot).all())

    logger.info("%d allocine venue providers found",
                len(allocine_venue_providers_with_pivot))
    for allocine_venue_provider, allocine_pivot in allocine_venue_providers_with_pivot:
        allocine_venue_provider.internalId = allocine_pivot.internalId

    db.session.bulk_save_objects([
        allocine_venue_provider for allocine_venue_provider, _unused in
        allocine_venue_providers_with_pivot
    ])
    db.session.commit()
    logger.info("Allocine venue providers have been updated")
Example #46
    def __init__(self, Acl, AclAccess, Compensate, signatures=None):
        '''
        Construct the compensate service alchemy.
        
        @param AclAccess: class of WithAclAccess
            The ACL access relation mapped class.
        @param Compensate: class of WithCompensate
            The compensate relation mapped class.
        @param signatures: dictionary{string: string|callable(identifier) -> string}
            A dictionary containing as keys the signatures that will be injected and as a value either the marker to be injected
            or a callable that takes the identifier as a parameter and provides the marker string value.
        '''
        assert isinstance(Acl, MappedSupport), 'Invalid mapped class %s' % Acl
        assert issubclass(
            AclAccess,
            WithAclAccess), 'Invalid acl access class %s' % AclAccess
        assert issubclass(
            Compensate,
            WithCompensate), 'Invalid compensate class %s' % Compensate
        if __debug__:
            if signatures is not None:
                assert isinstance(
                    signatures,
                    dict), 'Invalid fill in signatures %s' % signatures
                for signature, marker in signatures.items():
                    assert isinstance(signature,
                                      str), 'Invalid signature %s' % signature
                    assert isinstance(
                        marker,
                        str) or callable(marker), 'Invalid marker %s' % marker

        self.Acl = Acl
        self.AclIdentifier = modelId(Acl)
        self.AclAccess = AclAccess
        self.AliasAclAccess = aliased(AclAccess)
        self.Compensate = Compensate

        self.signatures = signatures
Example #47
    def get_target_server(self, sess, stickyvalues, servertype, server_address, endpoint_name, endpoint_key):
        '''
        This method finds the target server that will handle this request, based on sticky values or minimum load.
        Logic:
        - Returns the server that handles the sticky value, if one is found.
        - If not found, tries to find the minimum loaded server.
        - If the current server is the minimum loaded server, adds the corresponding sticky values instantly.
        - At the end, returns a dict with the target server and the sticky mappings if existing stickiness was found.

        If the returned dict contains the server and existing_mapping_list ==> an already-sticked server was found.
        If existing_mapping_list is empty, this is a newly found minimum loaded server (either self or some other server).

        :param stickyvalues: the sticky value strings.
        :param servertype: the type of the server
        :param server_address: the address of the server, which is the unique key
        :param endpoint_key: unique key of the endpoint
        :param endpoint_name: name of the endpoint
        '''
        sess.flush()
#        logging.getLogger().debug("Servertype passed : %s", servertype)
        server = None
        existing_mapping_list = None
        sticky_found = False
        with self.__class__.internal_lock:
            #logging.getLogger().debug("stickyvalues : %s ", stickyvalues)
            if stickyvalues:
                try:
                    existing_mapping_list = sess.query(self.StickyMapping).filter(self.StickyMapping.sticky_value.in_(stickyvalues)).all()
                    sticky_found = True
                except Exception as e:
                    logging.getLogger().debug("Exception occurred in the stickymapping query : %s ", e)
            
            if not existing_mapping_list:
                min_loaded_server = None
                #Following query finds the minimum loaded server of the given type, with load less than 100%
                ParentServer = aliased(self.Server, name='parent_server')
                min_loaded_server = sess.query(self.Server, ParentServer).\
                            outerjoin(ParentServer, and_(self.Server.server_type==ParentServer.server_type, ParentServer.load < self.Server.load)).\
                            filter(and_(self.Server.server_type==servertype, ParentServer.load==None, self.Server.load < 100)).all()
                if min_loaded_server:
                    #logging.getLogger().debug("min loaded server found type : %s ", type(min_loaded_server))
                    s = None
                    for m in min_loaded_server:
                        if server_address == m[0].unique_key:
                            s=m[0]
                    server = s if s else min_loaded_server[0][0]
                    unique_key = server.unique_key
                    if unique_key == server_address:
                        try:
                            for s in stickyvalues:
                                sticky_record = self.StickyMapping(server.server_id, endpoint_key, endpoint_name, s)
                                sess.add(sticky_record) 
                            sess.commit()
                        except Exception as e:
                            logging.getLogger().debug("Exception occurred while adding sticky values : %s ", e)
                            sess.rollback()
                else:
                    # TODO: indicate that the system is totally full (all extractors are at 100% load)
                    pass  # session closing needs to be taken care of in case an exception is raised
                    #raise Exception
            else:
                pass
                #logging.getLogger().debug("found existing mapping")
            if server is None:
                server_ids = set([e.server_id for e in existing_mapping_list])
                #logging.getLogger().debug("Already Sticky value found for: %s - servertype: %s. Sticky list is : %s", server_ids, servertype, existing_mapping_list)
                # The server_ids set above will contain at most one server of each type, even if sticky values for different server types are the same
                server = sess.query(self.Server).filter(and_(self.Server.server_id.in_(server_ids), self.Server.server_type==servertype)).one()
            return_dict = {'target_server' : server, 'stickymapping' : existing_mapping_list, 'sticky_found' : sticky_found}
            '''
            If the return dict contains the server and existing_mapping_list ==> an already sticky server was found
            If existing_mapping_list is empty, this is a newly found minimum loaded server (either self or some other server)
            '''
#        logging.getLogger().debug("returning the redirect server : %s", return_dict)
        return  return_dict
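
The core of the minimum-load lookup above is a self outer join on an alias of the Server table: a row is the least loaded of its type when no aliased row of the same type has a strictly smaller load, i.e. the outer-joined alias comes back NULL. A minimal standalone sketch of that pattern with a toy Server model (assuming SQLAlchemy 1.4+; the names are illustrative, not the original mapping):

from sqlalchemy import Column, Integer, String, and_, create_engine
from sqlalchemy.orm import Session, aliased, declarative_base

Base = declarative_base()

class Server(Base):
    __tablename__ = 'server'
    server_id = Column(Integer, primary_key=True)
    server_type = Column(String)
    load = Column(Integer)

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)

with Session(engine) as sess:
    sess.add_all([Server(server_type='extractor', load=40),
                  Server(server_type='extractor', load=10)])
    other = aliased(Server, name='other')
    # a server is the least loaded of its type when no other server of the
    # same type has a strictly smaller load (the outer-joined alias is NULL)
    least_loaded = (sess.query(Server)
                    .outerjoin(other, and_(Server.server_type == other.server_type,
                                           other.load < Server.load))
                    .filter(and_(Server.server_type == 'extractor',
                                 other.load == None, Server.load < 100))
                    .all())
    print([s.load for s in least_loaded])  # -> [10]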
Exemplo n.º 48
0
def get_time_series_analytics(request):
    import isodate
    from datetime import datetime
    start = request.GET.get("start", None)
    end = request.GET.get("end", None)
    interval = request.GET.get("interval", None)
    discussion = request.context._instance
    user_id = authenticated_userid(request) or Everyone
    try:
        if start:
            start = parse_datetime(start)
        if end:
            end = parse_datetime(end)
        if interval:
            interval = isodate.parse_duration(interval)
    except isodate.ISO8601Error as e:
        raise HTTPBadRequest(e)
    if interval and not start:
        raise HTTPBadRequest("You cannot define an interval and no start")
    if interval and not end:
        end = datetime.now()
    results = []

    from sqlalchemy import Table, MetaData, and_, case, cast, Float
    from sqlalchemy.exc import ProgrammingError
    import pprint
    import transaction
    with transaction.manager:
        metadata = MetaData(discussion.db.get_bind()
                            )  # make sure we are using the same connection

        intervals_table = Table(
            'temp_table_intervals_' + str(user_id),
            metadata,
            Column('interval_id', Integer, primary_key=True),
            Column('interval_start', DateTime, nullable=False),
            Column('interval_end', DateTime, nullable=False),
            prefixes=None if discussion.using_virtuoso else ['TEMPORARY'])
        try:
            intervals_table.drop(
            )  # In case there is a leftover from a previous crash
        except ProgrammingError:
            pass
        intervals_table.create()
        interval_start = start
        intervals = []
        if interval:
            while interval_start < end:
                interval_end = min(interval_start + interval, end)
                intervals.append({
                    'interval_start': interval_start,
                    'interval_end': interval_end
                })
                interval_start = interval_start + interval
            #pprint.pprint(intervals)
            discussion.db.execute(intervals_table.insert(), intervals)
        else:
            raise HTTPBadRequest("Please specify an interval")

        from assembl.models import Post, AgentProfile, AgentStatusInDiscussion, ViewPost

        # The posters
        post_subquery = discussion.db.query(
            intervals_table.c.interval_id,
            func.count(distinct(Post.id)).label('count_posts'),
            func.count(distinct(Post.creator_id)).label('count_post_authors'),
            # func.DB.DBA.BAG_AGG(Post.creator_id).label('post_authors'),
            # func.DB.DBA.BAG_AGG(Post.id).label('post_ids'),
        )
        post_subquery = post_subquery.outerjoin(
            Post,
            and_(Post.creation_date >= intervals_table.c.interval_start,
                 Post.creation_date < intervals_table.c.interval_end,
                 Post.discussion_id == discussion.id))
        post_subquery = post_subquery.group_by(intervals_table.c.interval_id)
        post_subquery = post_subquery.subquery()

        # The cumulative posters
        cumulative_posts_aliased = aliased(Post)
        cumulative_posts_subquery = discussion.db.query(
            intervals_table.c.interval_id,
            func.count(distinct(
                cumulative_posts_aliased.id)).label('count_cumulative_posts'),
            func.count(distinct(cumulative_posts_aliased.creator_id)).label(
                'count_cumulative_post_authors')
            # func.DB.DBA.BAG_AGG(cumulative_posts_aliased.id).label('cumulative_post_ids')
        )
        cumulative_posts_subquery = cumulative_posts_subquery.outerjoin(
            cumulative_posts_aliased,
            and_(
                cumulative_posts_aliased.creation_date <
                intervals_table.c.interval_end,
                cumulative_posts_aliased.discussion_id == discussion.id))
        cumulative_posts_subquery = cumulative_posts_subquery.group_by(
            intervals_table.c.interval_id)
        cumulative_posts_subquery = cumulative_posts_subquery.subquery()

        # The post viewers
        postViewers = aliased(ViewPost)
        viewedPosts = aliased(Post)
        post_viewers_subquery = discussion.db.query(
            intervals_table.c.interval_id,
            func.count(distinct(
                postViewers.actor_id)).label('UNRELIABLE_count_post_viewers'))
        post_viewers_subquery = post_viewers_subquery.outerjoin(postViewers, and_(postViewers.creation_date >= intervals_table.c.interval_start, postViewers.creation_date < intervals_table.c.interval_end)).\
                        join(viewedPosts, and_(postViewers.post_id == viewedPosts.id, viewedPosts.discussion_id == discussion.id))
        post_viewers_subquery = post_viewers_subquery.group_by(
            intervals_table.c.interval_id)
        post_viewers_subquery = post_viewers_subquery.subquery()

        # The visitors
        firstTimeVisitorAgent = aliased(AgentStatusInDiscussion)
        visitors_subquery = discussion.db.query(
            intervals_table.c.interval_id,
            func.count(firstTimeVisitorAgent.id).label(
                'count_first_time_logged_in_visitors'),
            # func.DB.DBA.BAG_AGG(firstTimeVisitorAgent.id).label('first_time_visitors')
        )
        visitors_subquery = visitors_subquery.outerjoin(
            firstTimeVisitorAgent,
            and_(
                firstTimeVisitorAgent.first_visit >=
                intervals_table.c.interval_start,
                firstTimeVisitorAgent.first_visit <
                intervals_table.c.interval_end,
                firstTimeVisitorAgent.discussion_id == discussion.id))
        visitors_subquery = visitors_subquery.group_by(
            intervals_table.c.interval_id)
        visitors_subquery = visitors_subquery.subquery()

        # The cumulative visitors
        cumulativeVisitorAgent = aliased(AgentStatusInDiscussion)
        cumulative_visitors_query = discussion.db.query(
            intervals_table.c.interval_id,
            func.count(distinct(cumulativeVisitorAgent.id)).label(
                'count_cumulative_logged_in_visitors'),
            # func.DB.DBA.BAG_AGG(cumulativeVisitorAgent.id).label('first_time_visitors')
        )
        cumulative_visitors_query = cumulative_visitors_query.outerjoin(
            cumulativeVisitorAgent,
            and_(
                cumulativeVisitorAgent.first_visit <
                intervals_table.c.interval_end,
                cumulativeVisitorAgent.discussion_id == discussion.id))
        cumulative_visitors_query = cumulative_visitors_query.group_by(
            intervals_table.c.interval_id)
        cumulative_visitors_subquery = cumulative_visitors_query.subquery()
        # query = cumulative_visitors_query

        # The members (can go up and down...)  Assumes that first_subscribed is available
        commented_out = """ first_subscribed isn't yet filled in by assembl
        memberAgentStatus = aliased(AgentStatusInDiscussion)
        members_subquery = discussion.db.query(intervals_table.c.interval_id,
            func.count(memberAgentStatus.id).label('count_approximate_members')
            )
        members_subquery = members_subquery.outerjoin(memberAgentStatus, ((memberAgentStatus.last_unsubscribed >= intervals_table.c.interval_end) | (memberAgentStatus.last_unsubscribed.is_(None))) & ((memberAgentStatus.first_subscribed < intervals_table.c.interval_end) | (memberAgentStatus.first_subscribed.is_(None))) & (memberAgentStatus.discussion_id==discussion.id))
        members_subquery = members_subquery.group_by(intervals_table.c.interval_id)
        query = members_subquery
        members_subquery = members_subquery.subquery()
        """

        subscribersAgentStatus = aliased(AgentStatusInDiscussion)
        subscribers_query = discussion.db.query(
            intervals_table.c.interval_id,
            func.sum(
                case([(subscribersAgentStatus.last_visit == None, 0),
                      (and_(
                          subscribersAgentStatus.last_visit <
                          intervals_table.c.interval_end,
                          subscribersAgentStatus.last_visit >=
                          intervals_table.c.interval_start), 1)],
                     else_=0)).label('retention_count_last_visit_in_period'),
            func.sum(
                case(
                    [(subscribersAgentStatus.first_visit == None, 0),
                     (and_(
                         subscribersAgentStatus.first_visit <
                         intervals_table.c.interval_end,
                         subscribersAgentStatus.first_visit >=
                         intervals_table.c.interval_start), 1)],
                    else_=0)).label('recruitment_count_first_visit_in_period'),
            func.sum(
                case([(subscribersAgentStatus.first_subscribed == None, 0),
                      (and_(
                          subscribersAgentStatus.first_subscribed <
                          intervals_table.c.interval_end,
                          subscribersAgentStatus.first_subscribed >=
                          intervals_table.c.interval_start), 1)],
                     else_=0)).
            label('UNRELIABLE_recruitment_count_first_subscribed_in_period'),
            func.sum(
                case([(subscribersAgentStatus.last_unsubscribed == None, 0),
                      (and_(
                          subscribersAgentStatus.last_unsubscribed <
                          intervals_table.c.interval_end,
                          subscribersAgentStatus.last_unsubscribed >=
                          intervals_table.c.interval_start), 1)],
                     else_=0)
            ).label('UNRELIABLE_retention_count_first_subscribed_in_period'),
        )
        subscribers_query = subscribers_query.outerjoin(
            subscribersAgentStatus,
            subscribersAgentStatus.discussion_id == discussion.id)
        subscribers_query = subscribers_query.group_by(
            intervals_table.c.interval_id)
        subscribers_subquery = subscribers_query.subquery()
        #query = subscribers_query

        combined_query = discussion.db.query(
            intervals_table,
            post_subquery,
            cumulative_posts_subquery,
            post_viewers_subquery,
            visitors_subquery,
            cumulative_visitors_subquery,
            case([
                (cumulative_posts_subquery.c.count_cumulative_post_authors ==
                 0, None),
                (cumulative_posts_subquery.c.count_cumulative_post_authors !=
                 0, (cast(post_subquery.c.count_post_authors, Float) / cast(
                     cumulative_posts_subquery.c.count_cumulative_post_authors,
                     Float)))
            ]).label('fraction_cumulative_authors_who_posted_in_period'),
            case([
                (cumulative_visitors_subquery.c.
                 count_cumulative_logged_in_visitors == 0, None),
                (cumulative_visitors_subquery.c.
                 count_cumulative_logged_in_visitors != 0,
                 (cast(post_subquery.c.count_post_authors, Float) / cast(
                     cumulative_visitors_subquery.c.
                     count_cumulative_logged_in_visitors, Float)))
            ]).label(
                'fraction_cumulative_logged_in_visitors_who_posted_in_period'),
            subscribers_subquery,
        )
        combined_query = combined_query.join(
            post_subquery,
            post_subquery.c.interval_id == intervals_table.c.interval_id)
        combined_query = combined_query.join(
            post_viewers_subquery, post_viewers_subquery.c.interval_id ==
            intervals_table.c.interval_id)
        combined_query = combined_query.join(
            visitors_subquery,
            visitors_subquery.c.interval_id == intervals_table.c.interval_id)
        combined_query = combined_query.join(
            cumulative_visitors_subquery,
            cumulative_visitors_subquery.c.interval_id ==
            intervals_table.c.interval_id)
        # combined_query = combined_query.join(members_subquery, members_subquery.c.interval_id==intervals_table.c.interval_id)
        combined_query = combined_query.join(
            subscribers_subquery, subscribers_subquery.c.interval_id ==
            intervals_table.c.interval_id)
        combined_query = combined_query.join(
            cumulative_posts_subquery, cumulative_posts_subquery.c.interval_id
            == intervals_table.c.interval_id)

        query = combined_query
        query = query.order_by(intervals_table.c.interval_id)
        results = query.all()

        # pprint.pprint(results)
        # end of transaction

    intervals_table.drop()
    if not (request.GET.get('format', None) == 'csv'
            or request.accept == 'text/csv'):
        # json default
        from assembl.lib.json import DateJSONEncoder
        return Response(json.dumps(results, cls=DateJSONEncoder),
                        content_type='application/json')

    fieldnames = [
        "interval_id",
        "interval_start",
        "interval_end",
        "count_first_time_logged_in_visitors",
        "count_cumulative_logged_in_visitors",
        "fraction_cumulative_logged_in_visitors_who_posted_in_period",
        "count_post_authors",
        "count_cumulative_post_authors",
        "fraction_cumulative_authors_who_posted_in_period",
        "count_posts",
        "count_cumulative_posts",
        "recruitment_count_first_visit_in_period",
        "UNRELIABLE_recruitment_count_first_subscribed_in_period",
        "retention_count_last_visit_in_period",
        "UNRELIABLE_retention_count_first_subscribed_in_period",
        "UNRELIABLE_count_post_viewers",
    ]
    # otherwise assume csv
    return csv_response(fieldnames, [r._asdict() for r in results])
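
The view above builds a temporary intervals table and then, for each metric, an outer-joined, grouped subquery keyed on interval_id. The skeleton of that per-interval aggregation, reduced to a single count over toy tables (a sketch only, assuming SQLAlchemy 1.4-style select; the table and column names are illustrative, not the assembl schema):

from datetime import datetime, timedelta
from sqlalchemy import (MetaData, Table, Column, Integer, DateTime, and_,
                        create_engine, func, select)

engine = create_engine('sqlite://')
metadata = MetaData()
intervals = Table('intervals', metadata,
                  Column('interval_id', Integer, primary_key=True),
                  Column('interval_start', DateTime, nullable=False),
                  Column('interval_end', DateTime, nullable=False))
posts = Table('posts', metadata,
              Column('id', Integer, primary_key=True),
              Column('creation_date', DateTime, nullable=False))
metadata.create_all(engine)

# build the interval rows exactly like the loop above
start, end, step = datetime(2020, 1, 1), datetime(2020, 1, 4), timedelta(days=1)
rows, interval_start = [], start
while interval_start < end:
    interval_end = min(interval_start + step, end)
    rows.append({'interval_start': interval_start, 'interval_end': interval_end})
    interval_start += step

with engine.begin() as conn:
    conn.execute(intervals.insert(), rows)
    conn.execute(posts.insert(), [{'creation_date': datetime(2020, 1, 2, 12)}])
    # LEFT OUTER JOIN keeps empty intervals; the count of the joined id is then 0
    query = (select(intervals.c.interval_id,
                    func.count(posts.c.id).label('count_posts'))
             .select_from(intervals.outerjoin(
                 posts, and_(posts.c.creation_date >= intervals.c.interval_start,
                             posts.c.creation_date < intervals.c.interval_end)))
             .group_by(intervals.c.interval_id))
    print(conn.execute(query).all())  # e.g. [(1, 0), (2, 1), (3, 0)]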
Exemplo n.º 49
0
from ally.support.sqlalchemy.util_service import buildQuery, buildLimits
from ally.container.support import setup
from sqlalchemy.orm.exc import NoResultFound
from sqlalchemy.orm.util import aliased
from sqlalchemy.sql import functions as fn
from superdesk.person.meta.person import PersonMapped
from superdesk.post.api.post import IPostService, Post
from superdesk.post.meta.type import PostTypeMapped
from sqlalchemy.sql.operators import desc_op
from livedesk.api.blog_type_post import IBlogTypePostService, BlogTypePost, \
    QBlogTypePost, BlogTypePostPersist
from livedesk.meta.blog_type_post import BlogTypePostMapped, BlogTypePostEntry

# --------------------------------------------------------------------

UserPerson = aliased(PersonMapped)


@injected
@setup(IBlogTypePostService, name='blogTypePostService')
class BlogTypePostServiceAlchemy(SessionSupport, IBlogTypePostService):
    '''
    Implementation for @see: IBlogPostService
    '''

    postService = IPostService
    wire.entity('postService')

    def __init__(self):
        '''
        Construct the blog post service.
Exemplo n.º 50
0
def _create_list(
    date=None,
    pilot=None,
    club=None,
    airport=None,
    pinned=None,
    filter=None,
    default_sorting_column="score",
    default_sorting_order="desc",
):

    pilot_alias = aliased(User, name="pilot")
    owner_alias = aliased(User, name="owner")

    subq = (db.session.query(FlightComment.flight_id,
                             func.count("*").label("count")).group_by(
                                 FlightComment.flight_id).subquery())

    current_user = User.get(request.user_id) if request.user_id else None

    flights = (db.session.query(Flight, subq.c.count).filter(
        Flight.is_listable(current_user)).join(Flight.igc_file).options(
            contains_eager(Flight.igc_file)).join(
                owner_alias, IGCFile.owner).options(
                    contains_eager(Flight.igc_file,
                                   IGCFile.owner,
                                   alias=owner_alias)).outerjoin(
                                       pilot_alias, Flight.pilot).options(
                                           contains_eager(Flight.pilot,
                                                          alias=pilot_alias)).
               options(joinedload(Flight.co_pilot)).outerjoin(
                   Flight.club).options(contains_eager(Flight.club)).outerjoin(
                       Flight.takeoff_airport).options(
                           contains_eager(Flight.takeoff_airport)).outerjoin(
                               Flight.model).options(
                                   contains_eager(Flight.model)).outerjoin(
                                       (subq, Flight.comments)))

    if date:
        flights = flights.filter(Flight.date_local == date)

    if pilot:
        flights = flights.filter(
            or_(Flight.pilot == pilot, Flight.co_pilot == pilot))
    if club:
        flights = flights.filter(Flight.club == club)

    if airport:
        flights = flights.filter(Flight.takeoff_airport == airport)

    if pinned:
        flights = flights.filter(Flight.id.in_(pinned))

    if filter is not None:
        flights = flights.filter(filter)

    valid_columns = {
        "date": getattr(Flight, "date_local"),
        "score": getattr(Flight, "index_score"),
        "pilot": getattr(pilot_alias, "name"),
        "distance": getattr(Flight, "olc_classic_distance"),
        "airport": getattr(Airport, "name"),
        "club": getattr(Club, "name"),
        "aircraft": getattr(AircraftModel, "name"),
        "time": getattr(Flight, "takeoff_time"),
    }

    flights_count = flights.count()

    flights = Sorter.sort(
        flights,
        "flights",
        default_sorting_column,
        valid_columns=valid_columns,
        default_order=default_sorting_order,
    )

    flights = flights.order_by(Flight.index_score.desc())

    flights = Pager.paginate(
        flights,
        "flights",
        items_per_page=int(
            current_app.config.get("SKYLINES_LISTS_DISPLAY_LENGTH", 50)),
    )

    flight_schema = FlightSchema()
    flights_json = []
    for f, num_comments in flights:
        flight = flight_schema.dump(f).data
        flight["private"] = not f.is_rankable()
        flight["numComments"] = num_comments
        flights_json.append(flight)

    json = dict(flights=flights_json, count=flights_count)

    if date:
        json["date"] = date.isoformat()

    if pilot:
        user_schema = UserSchema(only=("id", "name"))
        json["pilot"] = user_schema.dump(pilot).data

    if club:
        club_schema = ClubSchema(only=("id", "name"))
        json["club"] = club_schema.dump(club).data

    if airport:
        airport_schema = AirportSchema(only=("id", "name", "countryCode"))
        json["airport"] = airport_schema.dump(airport).data

    return jsonify(json)
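
A recurring idiom in this listing is pairing aliased() with contains_eager(..., alias=...): the query joins the alias explicitly (so it can filter and sort on it), and the same joined rows populate the relationship without a second query. A minimal sketch with toy models (the alias= keyword matches the older API used above; recent SQLAlchemy versions prefer contains_eager(...).of_type(alias)):

from sqlalchemy import Column, ForeignKey, Integer, String, create_engine
from sqlalchemy.orm import (Session, aliased, contains_eager, declarative_base,
                            relationship)

Base = declarative_base()

class User(Base):
    __tablename__ = 'user'
    id = Column(Integer, primary_key=True)
    name = Column(String)

class Flight(Base):
    __tablename__ = 'flight'
    id = Column(Integer, primary_key=True)
    pilot_id = Column(Integer, ForeignKey('user.id'))
    pilot = relationship(User)

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add(Flight(pilot=User(name='Jane')))
    session.flush()
    pilot_alias = aliased(User, name='pilot')
    # the explicit outer join against the alias is reused to populate
    # Flight.pilot, so ordering by pilot_alias.name needs no extra query
    flights = (session.query(Flight)
               .outerjoin(pilot_alias, Flight.pilot)
               .options(contains_eager(Flight.pilot, alias=pilot_alias))
               .order_by(pilot_alias.name)
               .all())
    print([f.pilot.name for f in flights])  # -> ['Jane']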
Exemplo n.º 51
0
def _create_list(
    date=None,
    pilot=None,
    club=None,
    airport=None,
    pinned=None,
    filter=None,
    default_sorting_column="score",
    default_sorting_order="desc",
):

    pilot_alias = aliased(User, name="pilot")
    owner_alias = aliased(User, name="owner")

    subq = (
        db.session.query(FlightComment.flight_id, func.count("*").label("count"))
        .group_by(FlightComment.flight_id)
        .subquery()
    )

    current_user = User.get(request.user_id) if request.user_id else None

    flights = (
        db.session.query(Flight, subq.c.count)
        .filter(Flight.is_listable(current_user))
        .join(Flight.igc_file)
        .options(contains_eager(Flight.igc_file))
        .join(owner_alias, IGCFile.owner)
        .options(contains_eager(Flight.igc_file, IGCFile.owner, alias=owner_alias))
        .outerjoin(pilot_alias, Flight.pilot)
        .options(contains_eager(Flight.pilot, alias=pilot_alias))
        .options(joinedload(Flight.co_pilot))
        .outerjoin(Flight.club)
        .options(contains_eager(Flight.club))
        .outerjoin(Flight.takeoff_airport)
        .options(contains_eager(Flight.takeoff_airport))
        .outerjoin(Flight.model)
        .options(contains_eager(Flight.model))
        .outerjoin((subq, Flight.comments))
    )

    if date:
        flights = flights.filter(Flight.date_local == date)

    if pilot:
        flights = flights.filter(or_(Flight.pilot == pilot, Flight.co_pilot == pilot))
    if club:
        flights = flights.filter(Flight.club == club)

    if airport:
        flights = flights.filter(Flight.takeoff_airport == airport)

    if pinned:
        flights = flights.filter(Flight.id.in_(pinned))

    if filter is not None:
        flights = flights.filter(filter)

    valid_columns = {
        "date": getattr(Flight, "date_local"),
        "score": getattr(Flight, "index_score"),
        "pilot": getattr(pilot_alias, "name"),
        "distance": getattr(Flight, "olc_classic_distance"),
        "airport": getattr(Airport, "name"),
        "club": getattr(Club, "name"),
        "aircraft": getattr(AircraftModel, "name"),
        "time": getattr(Flight, "takeoff_time"),
    }

    flights_count = flights.count()

    flights = Sorter.sort(
        flights, "flights", default_sorting_column, valid_columns=valid_columns, default_order=default_sorting_order
    )

    flights = flights.order_by(Flight.index_score.desc())

    flights = Pager.paginate(
        flights, "flights", items_per_page=int(current_app.config.get("SKYLINES_LISTS_DISPLAY_LENGTH", 50))
    )

    flight_schema = FlightSchema()
    flights_json = []
    for f, num_comments in flights:
        flight = flight_schema.dump(f).data
        flight["private"] = not f.is_rankable()
        flight["numComments"] = num_comments
        flights_json.append(flight)

    json = dict(flights=flights_json, count=flights_count)

    if date:
        json["date"] = date.isoformat()

    if pilot:
        user_schema = UserSchema(only=("id", "name"))
        json["pilot"] = user_schema.dump(pilot).data

    if club:
        club_schema = ClubSchema(only=("id", "name"))
        json["club"] = club_schema.dump(club).data

    if airport:
        airport_schema = AirportSchema(only=("id", "name", "countryCode"))
        json["airport"] = airport_schema.dump(airport).data

    return jsonify(json)
Exemplo n.º 52
0
def _create_list(date=None,
                 pilot=None,
                 club=None,
                 airport=None,
                 pinned=None,
                 filter=None,
                 default_sorting_column='score',
                 default_sorting_order='desc'):

    pilot_alias = aliased(User, name='pilot')
    owner_alias = aliased(User, name='owner')

    subq = db.session \
        .query(FlightComment.flight_id, func.count('*').label('count')) \
        .group_by(FlightComment.flight_id).subquery()

    current_user = User.get(request.user_id) if request.user_id else None

    flights = db.session.query(Flight, subq.c.count) \
        .filter(Flight.is_listable(current_user)) \
        .join(Flight.igc_file) \
        .options(contains_eager(Flight.igc_file)) \
        .join(owner_alias, IGCFile.owner) \
        .options(contains_eager(Flight.igc_file, IGCFile.owner, alias=owner_alias)) \
        .outerjoin(pilot_alias, Flight.pilot) \
        .options(contains_eager(Flight.pilot, alias=pilot_alias)) \
        .options(joinedload(Flight.co_pilot)) \
        .outerjoin(Flight.club) \
        .options(contains_eager(Flight.club)) \
        .outerjoin(Flight.takeoff_airport) \
        .options(contains_eager(Flight.takeoff_airport)) \
        .outerjoin(Flight.model) \
        .options(contains_eager(Flight.model)) \
        .outerjoin((subq, Flight.comments))

    if date:
        flights = flights.filter(Flight.date_local == date)

    if pilot:
        flights = flights.filter(
            or_(Flight.pilot == pilot, Flight.co_pilot == pilot))
    if club:
        flights = flights.filter(Flight.club == club)

    if airport:
        flights = flights.filter(Flight.takeoff_airport == airport)

    if pinned:
        flights = flights.filter(Flight.id.in_(pinned))

    if filter is not None:
        flights = flights.filter(filter)

    valid_columns = {
        'date': getattr(Flight, 'date_local'),
        'score': getattr(Flight, 'index_score'),
        'pilot': getattr(pilot_alias, 'name'),
        'distance': getattr(Flight, 'olc_classic_distance'),
        'airport': getattr(Airport, 'name'),
        'club': getattr(Club, 'name'),
        'aircraft': getattr(AircraftModel, 'name'),
        'time': getattr(Flight, 'takeoff_time'),
    }

    flights_count = flights.count()

    flights = Sorter.sort(flights,
                          'flights',
                          default_sorting_column,
                          valid_columns=valid_columns,
                          default_order=default_sorting_order)

    flights = flights.order_by(Flight.index_score.desc())

    flights = Pager.paginate(flights,
                             'flights',
                             items_per_page=int(
                                 current_app.config.get(
                                     'SKYLINES_LISTS_DISPLAY_LENGTH', 50)))

    flight_schema = FlightSchema()
    flights_json = []
    for f, num_comments in flights:
        flight = flight_schema.dump(f).data
        flight['private'] = not f.is_rankable()
        flight['numComments'] = num_comments
        flights_json.append(flight)

    json = dict(flights=flights_json, count=flights_count)

    if date:
        json['date'] = date.isoformat()

    if pilot:
        user_schema = UserSchema(only=('id', 'name'))
        json['pilot'] = user_schema.dump(pilot).data

    if club:
        club_schema = ClubSchema(only=('id', 'name'))
        json['club'] = club_schema.dump(club).data

    if airport:
        airport_schema = AirportSchema(only=('id', 'name', 'countryCode'))
        json['airport'] = airport_schema.dump(airport).data

    return jsonify(json)
Exemplo n.º 53
0
def _create_list(tab, kw, date=None, pilot=None, club=None, airport=None,
                 pinned=None, filter=None,
                 default_sorting_column='score', default_sorting_order='desc'):
    pilot_alias = aliased(User, name='pilot')
    owner_alias = aliased(User, name='owner')

    subq = db.session \
        .query(FlightComment.flight_id, func.count('*').label('count')) \
        .group_by(FlightComment.flight_id).subquery()

    flights = db.session.query(Flight, subq.c.count) \
        .filter(Flight.is_listable(g.current_user)) \
        .join(Flight.igc_file) \
        .options(contains_eager(Flight.igc_file)) \
        .join(owner_alias, IGCFile.owner) \
        .options(contains_eager(Flight.igc_file, IGCFile.owner, alias=owner_alias)) \
        .outerjoin(pilot_alias, Flight.pilot) \
        .options(contains_eager(Flight.pilot, alias=pilot_alias)) \
        .options(joinedload(Flight.co_pilot)) \
        .outerjoin(Flight.club) \
        .options(contains_eager(Flight.club)) \
        .outerjoin(Flight.takeoff_airport) \
        .options(contains_eager(Flight.takeoff_airport)) \
        .outerjoin(Flight.model) \
        .options(contains_eager(Flight.model)) \
        .outerjoin((subq, Flight.comments))

    if date:
        flights = flights.filter(Flight.date_local == date)

    if pilot:
        flights = flights.filter(or_(Flight.pilot == pilot,
                                     Flight.co_pilot == pilot))
    if club:
        flights = flights.filter(Flight.club == club)

    if airport:
        flights = flights.filter(Flight.takeoff_airport == airport)

    if pinned:
        flights = flights.filter(Flight.id.in_(pinned))

    if filter is not None:
        flights = flights.filter(filter)

    valid_columns = {
        'date': getattr(Flight, 'date_local'),
        'score': getattr(Flight, 'index_score'),
        'pilot': getattr(pilot_alias, 'name'),
        'distance': getattr(Flight, 'olc_classic_distance'),
        'airport': getattr(Airport, 'name'),
        'club': getattr(Club, 'name'),
        'aircraft': getattr(AircraftModel, 'name'),
        'time': getattr(Flight, 'takeoff_time'),
    }

    flights_count = flights.count()

    flights = Sorter.sort(flights, 'flights', default_sorting_column,
                          valid_columns=valid_columns,
                          default_order=default_sorting_order)

    flights = flights.order_by(Flight.index_score.desc())

    flights = Pager.paginate(flights, 'flights',
                             items_per_page=int(current_app.config.get('SKYLINES_LISTS_DISPLAY_LENGTH', 50)))

    return render_template('flights/list.jinja',
                           tab=tab, date=date, pilot=pilot, club=club,
                           airport=airport, flights=flights,
                           flights_count=flights_count)
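
The subq used by these listing views is the classic counted-children pattern: aggregate FlightComment rows per flight_id in a subquery, then LEFT OUTER JOIN it so flights without comments still appear (with a NULL count). A minimal standalone sketch with toy models; the originals join through the Flight.comments relationship, while the sketch spells out the equivalent ON clause:

from sqlalchemy import Column, ForeignKey, Integer, create_engine, func
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()

class Flight(Base):
    __tablename__ = 'flight'
    id = Column(Integer, primary_key=True)

class FlightComment(Base):
    __tablename__ = 'flight_comment'
    id = Column(Integer, primary_key=True)
    flight_id = Column(Integer, ForeignKey('flight.id'))

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add_all([Flight(id=1), Flight(id=2),
                     FlightComment(flight_id=1), FlightComment(flight_id=1)])
    # one row per flight that has comments, with its comment count
    subq = (session.query(FlightComment.flight_id,
                          func.count('*').label('count'))
            .group_by(FlightComment.flight_id)
            .subquery())
    rows = (session.query(Flight, subq.c.count)
            .outerjoin(subq, Flight.id == subq.c.flight_id)
            .order_by(Flight.id)
            .all())
    print([(f.id, count) for f, count in rows])  # -> [(1, 2), (2, None)]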
Exemplo n.º 54
0
    def __do_list(self, tab, kw, date=None, pilot=None, club=None, airport=None, \
                  pinned=None, filter=None, columns=None):
        pilot_alias = aliased(User, name='pilot')
        owner_alias = aliased(User, name='owner')

        subq = DBSession.query(FlightComment.flight_id,
                               func.count('*').label('count')) \
               .group_by(FlightComment.flight_id).subquery()

        flights = DBSession.query(Flight, subq.c.count) \
            .outerjoin(Flight.igc_file) \
            .options(contains_eager(Flight.igc_file)) \
            .outerjoin(owner_alias, IGCFile.owner) \
            .options(contains_eager(Flight.igc_file, IGCFile.owner, alias=owner_alias)) \
            .outerjoin(pilot_alias, Flight.pilot) \
            .options(contains_eager(Flight.pilot, alias=pilot_alias)) \
            .options(joinedload(Flight.co_pilot)) \
            .outerjoin(Flight.club) \
            .options(contains_eager(Flight.club)) \
            .outerjoin(Flight.takeoff_airport) \
            .options(contains_eager(Flight.takeoff_airport)) \
            .outerjoin(Flight.model) \
            .options(contains_eager(Flight.model)) \
            .outerjoin((subq, Flight.comments))

        if date:
            flights = flights.filter(Flight.date_local == date)

        if pilot:
            flights = flights.filter(or_(Flight.pilot == pilot,
                                         Flight.co_pilot == pilot))
        if club:
            flights = flights.filter(Flight.club == club)

        if airport:
            flights = flights.filter(Flight.takeoff_airport == airport)

        if pinned:
            flights = flights.filter(Flight.id.in_(pinned))

        if filter is not None:
            flights = flights.filter(filter)

        if request.response_type == 'application/json':
            if not columns:
                columns = {
                    0: (Flight, 'date_local'),
                    1: (Flight, 'index_score'),
                    2: (pilot_alias, 'display_name'),
                    3: (Flight, 'olc_classic_distance'),
                    4: (Airport, 'name'),
                    5: (Club, 'name'),
                    6: (AircraftModel, 'name'),
                    7: (Flight, 'takeoff_time'),
                    8: (Flight, 'id'),
                    9: (Flight, 'num_comments'),
                }

            flights, response_dict = GetDatatableRecords(kw, flights, columns)

            aaData = []
            for flight, num_comments in flights:
                aaData.append(dict(takeoff_time=flight.takeoff_time.strftime('%H:%M'),
                                   landing_time=flight.landing_time.strftime('%H:%M'),
                                   date=flight.date_local.strftime('%d.%m.%Y'),
                                   date_formatted=format_date(flight.date_local),
                                   index_score=flight.index_score,
                                   olc_classic_distance=flight.olc_classic_distance,
                                   pilot_id=flight.pilot_id,
                                   pilot=flight.pilot and flight.pilot.display_name,
                                   co_pilot_id=flight.co_pilot_id,
                                   co_pilot=flight.co_pilot and flight.co_pilot.display_name,
                                   club_id=flight.club_id,
                                   club=flight.club and truncate(flight.club.name, 25),
                                   owner=flight.igc_file.owner.display_name,
                                   takeoff_airport=flight.takeoff_airport and flight.takeoff_airport.name,
                                   takeoff_airport_id=flight.takeoff_airport and flight.takeoff_airport.id,
                                   takeoff_airport_country_code=flight.takeoff_airport and flight.takeoff_airport.country_code.lower(),
                                   takeoff_airport_country_name=flight.takeoff_airport and country_name(flight.takeoff_airport.country_code),
                                   aircraft=(flight.model and flight.model.name) or (flight.igc_file.model and '[' + flight.igc_file.model + ']'),
                                   aircraft_reg=flight.registration or flight.igc_file.registration or "Unknown",
                                   flight_id=flight.id,
                                   num_comments=num_comments))

            return dict(response_dict, aaData=aaData)

        else:
            if not date:
                flights = flights.order_by(desc(Flight.date_local))

            flights_count = flights.count()
            if flights_count > int(config.get('skylines.lists.server_side', 250)):
                limit = int(config.get('skylines.lists.display_length', 50))
            else:
                limit = int(config.get('skylines.lists.server_side', 250))

            flights = flights.order_by(desc(Flight.index_score))
            flights = flights.limit(limit)
            return dict(tab=tab, date=date, pilot=pilot, club=club, airport=airport,
                        flights=flights, flights_count=flights_count)
Exemplo n.º 55
0
def get_time_series_analytics(request):
    import isodate
    from datetime import datetime
    start = request.GET.get("start", None)
    end = request.GET.get("end", None)
    interval = request.GET.get("interval", None)
    discussion = request.context._instance
    user_id = authenticated_userid(request) or Everyone
    try:
        if start:
            start = parse_datetime(start)
        if end:
            end = parse_datetime(end)
        if interval:
            interval = isodate.parse_duration(interval)
    except isodate.ISO8601Error as e:
        raise HTTPBadRequest(e)
    if interval and not start:
        raise HTTPBadRequest("You cannot define an interval and no start")
    if interval and not end:
        end = datetime.now()
    results = []

    from sqlalchemy import Table, MetaData, and_, case, cast, Float
    from sqlalchemy.exc import ProgrammingError
    import pprint
    import transaction
    with transaction.manager:
        metadata = MetaData(discussion.db.get_bind())  # make sure we are using the same connection

        intervals_table = Table('temp_table_intervals_' + str(user_id), metadata,
            Column('interval_id', Integer, primary_key=True),
            Column('interval_start', DateTime, nullable=False),
            Column('interval_end', DateTime, nullable=False),
            prefixes=None if discussion.using_virtuoso else ['TEMPORARY']
        )
        try:
            intervals_table.drop()  # In case there is a leftover from a previous crash
        except ProgrammingError:
            pass
        intervals_table.create()
        interval_start = start
        intervals = []
        if interval:
            while interval_start < end:
                interval_end = min(interval_start + interval, end)
                intervals.append({'interval_start': interval_start, 'interval_end': interval_end})
                interval_start = interval_start + interval
            #pprint.pprint(intervals)
            discussion.db.execute(intervals_table.insert(), intervals)
        else:
            raise HTTPBadRequest("Please specify an interval")

        from assembl.models import Post, AgentProfile, AgentStatusInDiscussion, ViewPost

        # The posters
        post_subquery = discussion.db.query(intervals_table.c.interval_id,
            func.count(distinct(Post.id)).label('count_posts'),
            func.count(distinct(Post.creator_id)).label('count_post_authors'),
            # func.DB.DBA.BAG_AGG(Post.creator_id).label('post_authors'),
            # func.DB.DBA.BAG_AGG(Post.id).label('post_ids'),
            )
        post_subquery = post_subquery.outerjoin(Post, and_(Post.creation_date >= intervals_table.c.interval_start, Post.creation_date < intervals_table.c.interval_end, Post.discussion_id == discussion.id))
        post_subquery = post_subquery.group_by(intervals_table.c.interval_id)
        post_subquery = post_subquery.subquery()

        # The cumulative posters
        cumulative_posts_aliased = aliased(Post)
        cumulative_posts_subquery = discussion.db.query(intervals_table.c.interval_id,
            func.count(distinct(cumulative_posts_aliased.id)).label('count_cumulative_posts'),
            func.count(distinct(cumulative_posts_aliased.creator_id)).label('count_cumulative_post_authors')
            # func.DB.DBA.BAG_AGG(cumulative_posts_aliased.id).label('cumulative_post_ids')
            )
        cumulative_posts_subquery = cumulative_posts_subquery.outerjoin(cumulative_posts_aliased, and_(cumulative_posts_aliased.creation_date < intervals_table.c.interval_end, cumulative_posts_aliased.discussion_id == discussion.id))
        cumulative_posts_subquery = cumulative_posts_subquery.group_by(intervals_table.c.interval_id)
        cumulative_posts_subquery = cumulative_posts_subquery.subquery()

        # The post viewers
        postViewers = aliased(ViewPost)
        viewedPosts = aliased(Post)
        post_viewers_subquery = discussion.db.query(intervals_table.c.interval_id,
            func.count(distinct(postViewers.actor_id)).label('UNRELIABLE_count_post_viewers')
            )
        post_viewers_subquery = post_viewers_subquery.outerjoin(postViewers, and_(postViewers.creation_date >= intervals_table.c.interval_start, postViewers.creation_date < intervals_table.c.interval_end)).\
                        join(viewedPosts, and_(postViewers.post_id == viewedPosts.id, viewedPosts.discussion_id == discussion.id))
        post_viewers_subquery = post_viewers_subquery.group_by(intervals_table.c.interval_id)
        post_viewers_subquery = post_viewers_subquery.subquery()

        # The visitors
        firstTimeVisitorAgent = aliased(AgentStatusInDiscussion)
        visitors_subquery = discussion.db.query(intervals_table.c.interval_id,
            func.count(firstTimeVisitorAgent.id).label('count_first_time_logged_in_visitors'),
            # func.DB.DBA.BAG_AGG(firstTimeVisitorAgent.id).label('first_time_visitors')
            )
        visitors_subquery = visitors_subquery.outerjoin(firstTimeVisitorAgent, and_(firstTimeVisitorAgent.first_visit >= intervals_table.c.interval_start, firstTimeVisitorAgent.first_visit < intervals_table.c.interval_end, firstTimeVisitorAgent.discussion_id == discussion.id))
        visitors_subquery = visitors_subquery.group_by(intervals_table.c.interval_id)
        visitors_subquery = visitors_subquery.subquery()

        # The cumulative visitors
        cumulativeVisitorAgent = aliased(AgentStatusInDiscussion)
        cumulative_visitors_query = discussion.db.query(intervals_table.c.interval_id,
            func.count(distinct(cumulativeVisitorAgent.id)).label('count_cumulative_logged_in_visitors'),
            # func.DB.DBA.BAG_AGG(cumulativeVisitorAgent.id).label('first_time_visitors')
            )
        cumulative_visitors_query = cumulative_visitors_query.outerjoin(cumulativeVisitorAgent, and_(cumulativeVisitorAgent.first_visit < intervals_table.c.interval_end, cumulativeVisitorAgent.discussion_id == discussion.id))
        cumulative_visitors_query = cumulative_visitors_query.group_by(intervals_table.c.interval_id)
        cumulative_visitors_subquery = cumulative_visitors_query.subquery()
        # query = cumulative_visitors_query

        # The members (can go up and down...)  Assumes that first_subscribed is available
        commented_out = """ first_subscribed isn't yet filled in by assembl
        memberAgentStatus = aliased(AgentStatusInDiscussion)
        members_subquery = discussion.db.query(intervals_table.c.interval_id,
            func.count(memberAgentStatus.id).label('count_approximate_members')
            )
        members_subquery = members_subquery.outerjoin(memberAgentStatus, ((memberAgentStatus.last_unsubscribed >= intervals_table.c.interval_end) | (memberAgentStatus.last_unsubscribed.is_(None))) & ((memberAgentStatus.first_subscribed < intervals_table.c.interval_end) | (memberAgentStatus.first_subscribed.is_(None))) & (memberAgentStatus.discussion_id==discussion.id))
        members_subquery = members_subquery.group_by(intervals_table.c.interval_id)
        query = members_subquery
        members_subquery = members_subquery.subquery()
        """

        subscribersAgentStatus = aliased(AgentStatusInDiscussion)
        subscribers_query = discussion.db.query(intervals_table.c.interval_id,
                                                func.sum(
                                                    case([
                                                          (subscribersAgentStatus.last_visit == None, 0),
                                                          (and_(subscribersAgentStatus.last_visit < intervals_table.c.interval_end, subscribersAgentStatus.last_visit >= intervals_table.c.interval_start), 1)
                                                          ], else_=0)
                                                         ).label('retention_count_last_visit_in_period'),
                                                func.sum(
                                                    case([
                                                          (subscribersAgentStatus.first_visit == None, 0),
                                                          (and_(subscribersAgentStatus.first_visit < intervals_table.c.interval_end, subscribersAgentStatus.first_visit >= intervals_table.c.interval_start), 1)
                                                          ], else_=0)
                                                         ).label('recruitment_count_first_visit_in_period'),
                                                func.sum(
                                                    case([
                                                          (subscribersAgentStatus.first_subscribed == None, 0),
                                                          (and_(subscribersAgentStatus.first_subscribed < intervals_table.c.interval_end, subscribersAgentStatus.first_subscribed >= intervals_table.c.interval_start), 1)
                                                          ], else_=0)
                                                         ).label('UNRELIABLE_recruitment_count_first_subscribed_in_period'),
                                                func.sum(
                                                    case([
                                                          (subscribersAgentStatus.last_unsubscribed == None, 0),
                                                          (and_(subscribersAgentStatus.last_unsubscribed < intervals_table.c.interval_end, subscribersAgentStatus.last_unsubscribed >= intervals_table.c.interval_start), 1)
                                                          ], else_=0)
                                                         ).label('UNRELIABLE_retention_count_first_subscribed_in_period'),
                                            )
        subscribers_query = subscribers_query.outerjoin(subscribersAgentStatus, subscribersAgentStatus.discussion_id==discussion.id)
        subscribers_query = subscribers_query.group_by(intervals_table.c.interval_id)
        subscribers_subquery = subscribers_query.subquery()
        #query = subscribers_query

        combined_query = discussion.db.query(intervals_table,
                                             post_subquery,
                                             cumulative_posts_subquery,
                                             post_viewers_subquery,
                                             visitors_subquery,
                                             cumulative_visitors_subquery,
                                             case([
                                                   (cumulative_posts_subquery.c.count_cumulative_post_authors == 0, None),
                                                   (cumulative_posts_subquery.c.count_cumulative_post_authors != 0, (cast(post_subquery.c.count_post_authors, Float) / cast(cumulative_posts_subquery.c.count_cumulative_post_authors, Float)))
                                                   ]).label('fraction_cumulative_authors_who_posted_in_period'),
                                             case([
                                                   (cumulative_visitors_subquery.c.count_cumulative_logged_in_visitors == 0, None),
                                                   (cumulative_visitors_subquery.c.count_cumulative_logged_in_visitors != 0, (cast(post_subquery.c.count_post_authors, Float) / cast(cumulative_visitors_subquery.c.count_cumulative_logged_in_visitors, Float)))
                                                   ]).label('fraction_cumulative_logged_in_visitors_who_posted_in_period'),
                                             subscribers_subquery,
                                             )
        combined_query = combined_query.join(post_subquery, post_subquery.c.interval_id == intervals_table.c.interval_id)
        combined_query = combined_query.join(post_viewers_subquery, post_viewers_subquery.c.interval_id == intervals_table.c.interval_id)
        combined_query = combined_query.join(visitors_subquery, visitors_subquery.c.interval_id == intervals_table.c.interval_id)
        combined_query = combined_query.join(cumulative_visitors_subquery, cumulative_visitors_subquery.c.interval_id == intervals_table.c.interval_id)
        # combined_query = combined_query.join(members_subquery, members_subquery.c.interval_id==intervals_table.c.interval_id)
        combined_query = combined_query.join(subscribers_subquery, subscribers_subquery.c.interval_id==intervals_table.c.interval_id)
        combined_query = combined_query.join(cumulative_posts_subquery, cumulative_posts_subquery.c.interval_id == intervals_table.c.interval_id)

        query = combined_query
        query = query.order_by(intervals_table.c.interval_id)
        results = query.all()

        # pprint.pprint(results)
        # end of transaction

    intervals_table.drop()
    if not (request.GET.get('format', None) == 'csv' or
            request.accept == 'text/csv'):
        # json default
        from assembl.lib.json import DateJSONEncoder
        return Response(json.dumps(results, cls=DateJSONEncoder),
                        content_type='application/json')

    fieldnames = [
        "interval_id",
        "interval_start",
        "interval_end",
        "count_first_time_logged_in_visitors",
        "count_cumulative_logged_in_visitors",
        "fraction_cumulative_logged_in_visitors_who_posted_in_period",
        "count_post_authors",
        "count_cumulative_post_authors",
        "fraction_cumulative_authors_who_posted_in_period",
        "count_posts",
        "count_cumulative_posts",
        "recruitment_count_first_visit_in_period",
        "UNRELIABLE_recruitment_count_first_subscribed_in_period",
        "retention_count_last_visit_in_period",
        "UNRELIABLE_retention_count_first_subscribed_in_period",
        "UNRELIABLE_count_post_viewers",
    ]
    # otherwise assume csv
    return csv_response(fieldnames, [r._asdict() for r in results])
Exemplo n.º 56
0
    def assignRole(self, roleId, toRoleId):
        """
        @see: IRbacService.assignRole
        """
        child, parent = aliased(RoleNode), aliased(RoleNode)

        # check if the parent is in child subtree
        sql = self.session().query(child)
        sql = sql.join(parent, and_(child.left < parent.left, child.right > parent.right))
        sql = sql.filter(and_(child.role == toRoleId, parent.role == roleId))
        if sql.count() > 0:
            return False

        # check if has parent root
        sql = self.session().query(parent.id)
        sql = sql.join(child, and_(child.left > parent.left, child.right < parent.right))
        sql = sql.filter(child.role == roleId)
        parentCnt = sql.count()

        # get child roleNode
        childNode = self.session().query(RoleNode).filter(RoleNode.role == roleId).first()
        treeWidth = childNode.right - childNode.left + 1
        id = childNode.id

        # get the number of duplicates for parent
        sql = self.session().query(RoleNode).filter(RoleNode.role == toRoleId)
        treeCnt = sql.count()

        right, count = 0, 0
        sql = self.session().query(RoleNode.right).filter(RoleNode.role == toRoleId).order_by(RoleNode.right.desc())

        for (left,) in sql.all():
            if count == 0:
                # last interval
                gap = treeWidth * treeCnt

                sql = self.session().query(RoleNode).filter(RoleNode.left >= left)
                sql.update({RoleNode.left: RoleNode.left + gap}, False)

                sql = self.session().query(RoleNode).filter(RoleNode.right >= left)
                sql.update({RoleNode.right: RoleNode.right + gap}, False)

                self.session().flush()
                # TODO: check the impact of removing: result: not working
                self.session().commit()

                # get child roleNode
                childNode = self.session().query(RoleNode).get(id)

                gap = left - childNode.left

                # insert
                insert = InsertFromSelect(
                    tableFor(RoleNode),
                    "fk_role_id, lft, rgt",
                    select([RoleNode.role, RoleNode.left + gap, RoleNode.right + gap]).where(
                        and_(RoleNode.left >= childNode.left, RoleNode.right <= childNode.right)
                    ),
                )
                self.session().execute(insert)

                right = left
                count = count + 1
                continue

            gap = (treeCnt - count) * treeWidth

            sql = self.session().query(RoleNode).filter(and_(RoleNode.left >= left, RoleNode.left < right))
            sql.update({RoleNode.left: RoleNode.left + gap}, False)

            sql = self.session().query(RoleNode).filter(and_(RoleNode.right >= left, RoleNode.right < right))
            sql.update({RoleNode.right: RoleNode.right + gap}, False)

            self.session().flush()
            # TODO: check the impact of removing: result: not working
            self.session().commit()

            # get child roleNode
            sql = self.session().query(RoleNode).get(id)

            gap = gap + left - childNode.left

            # insert
            # TODO: for sqlite the form INSERT INTO t1 (columns) SELECT * FROM t1 is needed;
            # change how the list of columns is specified
            insert = InsertFromSelect(
                tableFor(RoleNode),
                "fk_role_id, lft, rgt",
                select([RoleNode.role, RoleNode.left + gap, RoleNode.right + gap]).where(
                    and_(RoleNode.left >= childNode.left, RoleNode.right <= childNode.right)
                ),
            )
            self.session().execute(insert)

            right = left
            count = count + 1

        # if has root parent, delete from it
        if parentCnt == 1:
            # get child roleNode
            sql = self.session().query(RoleNode).get(id)

            left = childNode.left
            right = childNode.right

            # delete child subtree from root
            sql = self.session().query(RoleNode).filter(and_(RoleNode.left >= left, RoleNode.right <= right))
            sql.delete()

            # update lft
            sql = self.session().query(RoleNode).filter(RoleNode.left > left)
            sql.update({RoleNode.left: RoleNode.left - treeWidth}, False)

            # update rgt
            sql = self.session().query(RoleNode).filter(RoleNode.right > right)
            sql.update({RoleNode.right: RoleNode.right - treeWidth}, False)

        return True
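
assignRole relies on the nested-set encoding of the role tree: with two aliases of RoleNode, a parent node contains a child node exactly when parent.left < child.left and child.right < parent.right. A minimal standalone sketch of that ancestor test (toy mapping with left/right stored as lft/rgt, mirroring the columns referenced above; the helper name is illustrative):

from sqlalchemy import Column, Integer, and_, create_engine
from sqlalchemy.orm import Session, aliased, declarative_base

Base = declarative_base()

class RoleNode(Base):
    __tablename__ = 'role_node'
    id = Column(Integer, primary_key=True)
    role = Column(Integer)
    left = Column('lft', Integer)
    right = Column('rgt', Integer)

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)

with Session(engine) as session:
    # role 1 is the root (1..6), role 2 its child (2..5), role 3 a grandchild (3..4)
    session.add_all([RoleNode(role=1, left=1, right=6),
                     RoleNode(role=2, left=2, right=5),
                     RoleNode(role=3, left=3, right=4)])
    child, parent = aliased(RoleNode), aliased(RoleNode)

    def is_descendant(session, role_id, ancestor_role_id):
        # the child's (left, right) interval lies strictly inside the parent's
        sql = session.query(child)
        sql = sql.join(parent, and_(child.left > parent.left,
                                    child.right < parent.right))
        sql = sql.filter(and_(child.role == role_id,
                              parent.role == ancestor_role_id))
        return sql.count() > 0

    print(is_descendant(session, 3, 1))  # -> True
    print(is_descendant(session, 1, 3))  # -> False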
Exemplo n.º 57
0
from sqlalchemy.sql.expression import func, or_
from sqlalchemy.sql.functions import current_timestamp
from sqlalchemy.sql.operators import desc_op
from superdesk.collaborator.meta.collaborator import CollaboratorMapped
from superdesk.person.meta.person import PersonMapped
from superdesk.person_icon.api.person_icon import IPersonIconService
from superdesk.post.api.post import IPostService, Post, QPostUnpublished
from superdesk.post.meta.type import PostTypeMapped
from livedesk.impl.blog_collaborator_group import updateLastAccessOn
from superdesk.source.meta.source import SourceMapped
from superdesk.verification.meta.verification import PostVerificationMapped
from superdesk.verification.meta.status import VerificationStatusMapped

# --------------------------------------------------------------------

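# dedicated alias of the person table so it can be joined a second time in the
# post queries of this service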
UserPerson = aliased(PersonMapped)

@injected
@setup(IBlogPostService, name='blogPostService')
class BlogPostServiceAlchemy(SessionSupport, IBlogPostService):
    '''
    Implementation for @see: IBlogPostService
    '''

    postService = IPostService; wire.entity('postService')
    personIconService = IPersonIconService; wire.entity('personIconService')
    internal_source_type = 'internal'

    def __init__(self):
        '''
        Construct the blog post service.
Exemplo n.º 58
0
def _create_list(tab,
                 kw,
                 date=None,
                 pilot=None,
                 club=None,
                 airport=None,
                 pinned=None,
                 filter=None,
                 columns=None):
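    """Build the flights list query with its eager-loaded relations and a
    per-flight comment count, apply the optional filters, and return either a
    DataTables JSON response (for XHR requests) or the rendered list template.
    """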
    pilot_alias = aliased(User, name='pilot')
    owner_alias = aliased(User, name='owner')

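    # per-flight comment count, outer-joined below so that flights without
    # comments are still included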
    subq = db.session \
        .query(FlightComment.flight_id, func.count('*').label('count')) \
        .group_by(FlightComment.flight_id).subquery()

    flights = db.session.query(Flight, subq.c.count) \
        .join(Flight.igc_file) \
        .options(contains_eager(Flight.igc_file)) \
        .join(owner_alias, IGCFile.owner) \
        .options(contains_eager(Flight.igc_file, IGCFile.owner, alias=owner_alias)) \
        .outerjoin(pilot_alias, Flight.pilot) \
        .options(contains_eager(Flight.pilot, alias=pilot_alias)) \
        .options(joinedload(Flight.co_pilot)) \
        .outerjoin(Flight.club) \
        .options(contains_eager(Flight.club)) \
        .outerjoin(Flight.takeoff_airport) \
        .options(contains_eager(Flight.takeoff_airport)) \
        .outerjoin(Flight.model) \
        .options(contains_eager(Flight.model)) \
        .outerjoin((subq, Flight.comments))

    if date:
        flights = flights.filter(Flight.date_local == date)

    if pilot:
        flights = flights.filter(
            or_(Flight.pilot == pilot, Flight.co_pilot == pilot))
    if club:
        flights = flights.filter(Flight.club == club)

    if airport:
        flights = flights.filter(Flight.takeoff_airport == airport)

    if pinned:
        flights = flights.filter(Flight.id.in_(pinned))

    if filter is not None:
        flights = flights.filter(filter)

    if request.is_xhr:
        if not columns:
            columns = {
                0: (Flight, 'date_local'),
                1: (Flight, 'index_score'),
                2: (pilot_alias, 'name'),
                3: (Flight, 'olc_classic_distance'),
                4: (Airport, 'name'),
                5: (Club, 'name'),
                6: (AircraftModel, 'name'),
                7: (Flight, 'takeoff_time'),
                8: (Flight, 'id'),
                9: (Flight, 'num_comments'),
            }

        flights, response_dict = GetDatatableRecords(kw, flights, columns)

        aaData = []
        for flight, num_comments in flights:
            aaData.append(
                dict(takeoff_time=flight.takeoff_time.strftime('%H:%M'),
                     landing_time=flight.landing_time.strftime('%H:%M'),
                     date=flight.date_local.strftime('%d.%m.%Y'),
                     date_formatted=format_date(flight.date_local),
                     index_score=format_decimal(flight.index_score,
                                                format='0'),
                     olc_classic_distance=flight.olc_classic_distance,
                     pilot_id=flight.pilot_id,
                     pilot=flight.pilot and flight.pilot.name,
                     pilot_name=flight.pilot_name,
                     co_pilot_id=flight.co_pilot_id,
                     co_pilot=flight.co_pilot and flight.co_pilot.name,
                     co_pilot_name=flight.co_pilot_name,
                     club_id=flight.club_id,
                     club=flight.club and truncate(flight.club.name, 25),
                     owner=flight.igc_file.owner.name,
                     takeoff_airport=flight.takeoff_airport
                     and flight.takeoff_airport.name,
                     takeoff_airport_id=flight.takeoff_airport
                     and flight.takeoff_airport.id,
                     takeoff_airport_country_code=flight.takeoff_airport
                     and flight.takeoff_airport.country_code.lower(),
                     takeoff_airport_country_name=flight.takeoff_airport
                     and country_name(flight.takeoff_airport.country_code),
                     aircraft=(flight.model and flight.model.name)
                     or (flight.igc_file.model
                         and '[' + flight.igc_file.model + ']'),
                     aircraft_reg=flight.registration
                     or flight.igc_file.registration or "Unknown",
                     flight_id=flight.id,
                     num_comments=num_comments))

        return jsonify(aaData=aaData, **response_dict)

    else:
        if not date:
            flights = flights.order_by(Flight.date_local.desc())

        flights_count = flights.count()
        if flights_count > int(
                current_app.config.get('SKYLINES_LISTS_SERVER_SIDE', 250)):
            limit = int(
                current_app.config.get('SKYLINES_LISTS_DISPLAY_LENGTH', 50))
        else:
            limit = int(
                current_app.config.get('SKYLINES_LISTS_SERVER_SIDE', 250))

        flights = flights.order_by(Flight.index_score.desc())
        flights = flights.limit(limit)

        return render_template('flights/list.jinja',
                               tab=tab,
                               date=date,
                               pilot=pilot,
                               club=club,
                               airport=airport,
                               flights=flights,
                               flights_count=flights_count)
Exemplo n.º 59
0
def set_recent_outings(waypoint, lang):
    """Set the last 10 outings on routes associated with the given waypoint.
    """
    t_outing_route = aliased(Association, name='a1')
    t_route_wp = aliased(Association, name='a2')
    t_route = aliased(Document, name='r')
    with_query_waypoints = _get_select_children(waypoint)

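    # t_outing_route links outings (child) to routes (parent); t_route_wp links
    # routes (child) to the waypoint and its children resolved by the with-query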
    recent_outings = DBSession.query(Outing). \
        filter(Outing.redirects_to.is_(None)). \
        join(
            t_outing_route,
            Outing.document_id == t_outing_route.child_document_id). \
        join(
            t_route,
            and_(
                t_outing_route.parent_document_id == t_route.document_id,
                t_route.type == ROUTE_TYPE)). \
        join(
            t_route_wp,
            t_route_wp.child_document_id == t_route.document_id). \
        join(
            with_query_waypoints,
            with_query_waypoints.c.document_id == t_route_wp.parent_document_id
        ). \
        options(load_only(
            Outing.document_id, Outing.activities, Outing.date_start,
            Outing.date_end, Outing.version, Outing.protected)). \
        options(joinedload(Outing.locales).load_only(
            DocumentLocale.lang, DocumentLocale.title,
            DocumentLocale.version)). \
        order_by(Outing.date_end.desc()). \
        limit(NUM_RECENT_OUTINGS). \
        all()

    set_author(recent_outings, None)
    if lang is not None:
        set_best_locale(recent_outings, lang)

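    # repeat the same joins to count every matching outing, not only the
    # NUM_RECENT_OUTINGS most recent ones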
    total = DBSession.query(Outing.document_id). \
        filter(Outing.redirects_to.is_(None)). \
        join(
            t_outing_route,
            Outing.document_id == t_outing_route.child_document_id). \
        join(
            t_route,
            and_(
                t_outing_route.parent_document_id == t_route.document_id,
                t_route.type == ROUTE_TYPE)). \
        join(
            t_route_wp,
            t_route_wp.child_document_id == t_route.document_id). \
        join(
            with_query_waypoints,
            with_query_waypoints.c.document_id == t_route_wp.parent_document_id
        ). \
        count()

    waypoint.associations['recent_outings'] = {
        'total': total,
        'outings': [
            to_json_dict(outing, schema_association_outing)
            for outing in recent_outings
        ]
    }
Exemplo n.º 60
0
def _create_list(tab, kw, date=None, pilot=None, club=None, airport=None,
                 pinned=None, filter=None, columns=None):
    pilot_alias = aliased(User, name='pilot')
    owner_alias = aliased(User, name='owner')

    subq = db.session \
        .query(FlightComment.flight_id, func.count('*').label('count')) \
        .group_by(FlightComment.flight_id).subquery()

    flights = db.session.query(Flight, subq.c.count) \
        .join(Flight.igc_file) \
        .options(contains_eager(Flight.igc_file)) \
        .join(owner_alias, IGCFile.owner) \
        .options(contains_eager(Flight.igc_file, IGCFile.owner, alias=owner_alias)) \
        .outerjoin(pilot_alias, Flight.pilot) \
        .options(contains_eager(Flight.pilot, alias=pilot_alias)) \
        .options(joinedload(Flight.co_pilot)) \
        .outerjoin(Flight.club) \
        .options(contains_eager(Flight.club)) \
        .outerjoin(Flight.takeoff_airport) \
        .options(contains_eager(Flight.takeoff_airport)) \
        .outerjoin(Flight.model) \
        .options(contains_eager(Flight.model)) \
        .outerjoin((subq, Flight.comments))

    if date:
        flights = flights.filter(Flight.date_local == date)

    if pilot:
        flights = flights.filter(or_(Flight.pilot == pilot,
                                     Flight.co_pilot == pilot))
    if club:
        flights = flights.filter(Flight.club == club)

    if airport:
        flights = flights.filter(Flight.takeoff_airport == airport)

    if pinned:
        flights = flights.filter(Flight.id.in_(pinned))

    if filter is not None:
        flights = flights.filter(filter)

    if request.is_xhr:
        if not columns:
            columns = {
                0: (Flight, 'date_local'),
                1: (Flight, 'index_score'),
                2: (pilot_alias, 'name'),
                3: (Flight, 'olc_classic_distance'),
                4: (Airport, 'name'),
                5: (Club, 'name'),
                6: (AircraftModel, 'name'),
                7: (Flight, 'takeoff_time'),
                8: (Flight, 'id'),
                9: (Flight, 'num_comments'),
            }

        flights, response_dict = GetDatatableRecords(kw, flights, columns)

        aaData = []
        for flight, num_comments in flights:
            aaData.append(dict(takeoff_time=flight.takeoff_time.strftime('%H:%M'),
                               landing_time=flight.landing_time.strftime('%H:%M'),
                               date=flight.date_local.strftime('%d.%m.%Y'),
                               date_formatted=format_date(flight.date_local),
                               index_score=format_decimal(flight.index_score, format='0'),
                               olc_classic_distance=flight.olc_classic_distance,
                               pilot_id=flight.pilot_id,
                               pilot=flight.pilot and flight.pilot.name,
                               pilot_name=flight.pilot_name,
                               co_pilot_id=flight.co_pilot_id,
                               co_pilot=flight.co_pilot and flight.co_pilot.name,
                               co_pilot_name=flight.co_pilot_name,
                               club_id=flight.club_id,
                               club=flight.club and truncate(flight.club.name, 25),
                               owner=flight.igc_file.owner.name,
                               takeoff_airport=flight.takeoff_airport and flight.takeoff_airport.name,
                               takeoff_airport_id=flight.takeoff_airport and flight.takeoff_airport.id,
                               takeoff_airport_country_code=flight.takeoff_airport and flight.takeoff_airport.country_code.lower(),
                               takeoff_airport_country_name=flight.takeoff_airport and country_name(flight.takeoff_airport.country_code),
                               aircraft=(flight.model and flight.model.name) or (flight.igc_file.model and '[' + flight.igc_file.model + ']'),
                               aircraft_reg=flight.registration or flight.igc_file.registration or "Unknown",
                               flight_id=flight.id,
                               num_comments=num_comments))

        return jsonify(aaData=aaData, **response_dict)

    else:
        if not date:
            flights = flights.order_by(Flight.date_local.desc())

        flights_count = flights.count()
        if flights_count > int(current_app.config.get('SKYLINES_LISTS_SERVER_SIDE', 250)):
            limit = int(current_app.config.get('SKYLINES_LISTS_DISPLAY_LENGTH', 50))
        else:
            limit = int(current_app.config.get('SKYLINES_LISTS_SERVER_SIDE', 250))

        flights = flights.order_by(Flight.index_score.desc())
        flights = flights.limit(limit)

        return render_template('flights/list.jinja',
                               tab=tab, date=date, pilot=pilot, club=club,
                               airport=airport, flights=flights,
                               flights_count=flights_count)