Example #1
    def deletePostsByTags(self, blacklist, whitelist):
        if not blacklist:
            return 0

        s = self.DBsession()

        subq = s.query(Post.id).distinct().join(Post.tags)
        subq = subq.filter(Tag.name.in_(whitelist))

        q = s.query(Post.id).distinct().join(Post.tags)
        q = q.filter(Tag.name.in_(blacklist)).except_(subq)

        d = s.query(Post).filter(Post.id.in_(q))
        post_count = d.delete(synchronize_session='fetch')

        # delete the image refs without posts from the database
        q = s.query(Post.image_id).distinct()
        d = s.query(Image.id).filter(not_(Image.id.in_(q)))
        img_count = d.delete(synchronize_session='fetch')

        q = s.query(Tag.id).join(Post.tags)
        d = s.query(Tag.id).filter(not_(Tag.id.in_(q)))
        tag_count = d.delete(synchronize_session='fetch')

        s.commit()
        return (post_count, img_count, tag_count)
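
Every example on this page is built around the same SQLAlchemy primitive: not_() wrapped around in_(), any() or exists(). Below is a minimal, self-contained sketch of that pattern, assuming a throwaway Item model and an in-memory SQLite engine; neither belongs to the project above.

from sqlalchemy import Column, Integer, String, create_engine, not_
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker

Base = declarative_base()

class Item(Base):
    __tablename__ = 'items'
    id = Column(Integer, primary_key=True)
    name = Column(String)

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()
session.add_all([Item(name=n) for n in ('keep', 'drop', 'also-drop')])
session.commit()

# Keep everything whose name is NOT in the blacklist.
survivors = session.query(Item).filter(not_(Item.name.in_(['drop', 'also-drop']))).all()
print([i.name for i in survivors])  # ['keep']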
Example #2
def check_field_association(session, tid, request):
    """
    Transaction to check consistency of field association

    :param session: The ORM session
    :param tid: The tenant ID
    :param request: The request data to be verified
    """
    if request.get('fieldgroup_id', '') and session.query(models.Field).filter(
            models.Field.id == request['fieldgroup_id'],
            models.Field.tid != tid).count():
        raise errors.InputValidationError()

    if request.get('template_id', '') and session.query(models.Field).filter(
            models.Field.id == request['template_id'],
            not_(models.Field.tid.in_(set([1, tid])))).count():
        raise errors.InputValidationError()

    if request.get('step_id', '') and session.query(models.Field).filter(
            models.Step.id == request['step_id'], models.Questionnaire.id
            == models.Step.questionnaire_id,
            not_(models.Questionnaire.tid.in_(set([1, tid])))).count():
        raise errors.InputValidationError()

    if request.get('fieldgroup_id', ''):
        ancestors = set(fieldtree_ancestors(session, request['fieldgroup_id']))
        if request['id'] == request['fieldgroup_id'] or request[
                'id'] in ancestors:
            raise errors.InputValidationError(
                "Provided field association would cause recursion loop")
Example #3
 def constructQuery(self, context):
     session = Session()
     trusted = removeSecurityProxy(context)
     parliament_id = self._get_parliament_id(context)
     item_id = getattr(context, self.value_field, None)
     existing_item_ids = [assn.item_id for assn in trusted.values()]
     if item_id:
         query = session.query(domain.ParliamentaryItem).filter(
             domain.ParliamentaryItem.parliamentary_item_id ==
             item_id)
     else:
         query = session.query(domain.ParliamentaryItem).filter(
                 sql.and_(
                     sql.not_(domain.ParliamentaryItem.status.in_(
                             _assignable_state_ids
                         )
                     ),
                     sql.not_(
                         domain.ParliamentaryItem.parliamentary_item_id.in_(
                             existing_item_ids
                         )
                     ),
                     domain.ParliamentaryItem.parliament_id == parliament_id
                 )
             )
     return query
Example #4
def abort_dead_recipes(*args):
    filters = [not_(DistroTree.lab_controller_assocs.any())]
    if _virt_enabled():
        filters.append(
            and_(not_(Recipe.systems.any()),
                 Recipe.virt_status != RecipeVirtStatus.possible))
    else:
        filters.append(not_(Recipe.systems.any()))
    recipes = MachineRecipe.query\
            .join(MachineRecipe.recipeset).join(RecipeSet.job)\
            .filter(Job.dirty_version == Job.clean_version)\
            .outerjoin(Recipe.distro_tree)\
            .filter(Recipe.status == TaskStatus.queued)\
            .filter(or_(*filters))
    if not recipes.count():
        return False
    log.debug("Entering abort_dead_recipes")
    for recipe_id, in recipes.values(MachineRecipe.id):
        session.begin()
        try:
            abort_dead_recipe(recipe_id)
            session.commit()
        except exceptions.Exception, e:
            log.exception('Error in abort_dead_recipe(%s)', recipe_id)
            session.rollback()
        finally:
            session.close()
Example #5
        def _get_query_for_editors():
            """
            Returns a query that selects versions available to editors.
            """
            active_versions = Session.query(
                mappedClass.version,
                mappedClass.fk_status
            ).\
                filter(mappedClass.identifier == uid).\
                filter(or_(
                    mappedClass.fk_status == 2, mappedClass.fk_status == 3))

            own_filters = and_(
                mappedClass.identifier == uid,
                not_(mappedClass.fk_status == 2),
                not_(mappedClass.fk_status == 3),
                User.username == self.request.user.username)
            own_versions = Session.query(
                mappedClass.version,
                mappedClass.fk_status
            ).\
                join(Changeset).\
                join(User).\
                filter(*own_filters)
            return active_versions.union(own_versions)
Example #6
def abort_dead_recipes(*args):
    filters = [not_(DistroTree.lab_controller_assocs.any())]
    if _virt_enabled():
        filters.append(and_(not_(Recipe.systems.any()),
                Recipe.virt_status != RecipeVirtStatus.possible))
    else:
        filters.append(not_(Recipe.systems.any()))
    recipes = MachineRecipe.query\
            .join(MachineRecipe.recipeset).join(RecipeSet.job)\
            .filter(Job.dirty_version == Job.clean_version)\
            .outerjoin(Recipe.distro_tree)\
            .filter(Recipe.status == TaskStatus.queued)\
            .filter(or_(*filters))
    if not recipes.count():
        return False
    log.debug("Entering abort_dead_recipes")
    for recipe_id, in recipes.values(MachineRecipe.id):
        session.begin()
        try:
            abort_dead_recipe(recipe_id)
            session.commit()
        except exceptions.Exception, e:
            log.exception('Error in abort_dead_recipe(%s)', recipe_id)
            session.rollback()
        finally:
            session.close()
Example #7
def check_field_association(session, tid, field_dict):
    if field_dict.get('fieldgroup_id', '') and session.query(
            models.Field).filter(
                models.Field.id == field_dict['fieldgroup_id'],
                models.Field.tid != tid).count():
        raise errors.InputValidationError()

    if field_dict.get('template_id',
                      '') and session.query(models.Field).filter(
                          models.Field.id == field_dict['template_id'],
                          not_(models.Field.tid.in_(set([1, tid])))).count():
        raise errors.InputValidationError()

    if field_dict.get('step_id', '') and session.query(models.Field).filter(
            models.Step.id == field_dict['step_id'], models.Questionnaire.id
            == models.Step.questionnaire_id,
            not_(models.Questionnaire.tid.in_(set([1, tid])))).count():
        raise errors.InputValidationError()

    if field_dict.get('fieldgroup_id', ''):
        ancestors = set(
            fieldtree_ancestors(session, field_dict['fieldgroup_id']))
        if field_dict['id'] == field_dict['fieldgroup_id'] or field_dict[
                'id'] in ancestors:
            raise errors.InputValidationError(
                "Provided field association would cause recursion loop")
Example #8
def abort_dead_recipes(*args):
    work_done = False
    with session.begin():
        filters = [not_(DistroTree.lab_controller_assocs.any())]
        if _virt_enabled():
            filters.append(
                and_(not_(Recipe.systems.any()),
                     Recipe.virt_status != RecipeVirtStatus.possible))
        else:
            filters.append(not_(Recipe.systems.any()))
        recipes = MachineRecipe.query\
                .join(MachineRecipe.recipeset).join(RecipeSet.job)\
                .filter(Job.dirty_version == Job.clean_version)\
                .outerjoin(Recipe.distro_tree)\
                .filter(Recipe.status == TaskStatus.queued)\
                .filter(or_(*filters))
        recipe_ids = [
            recipe_id for recipe_id, in recipes.values(MachineRecipe.id)
        ]
    if recipe_ids:
        log.debug('Aborting dead recipes [%s ... %s] (%d total)',
                  recipe_ids[0], recipe_ids[-1], len(recipe_ids))
    for recipe_id in recipe_ids:
        session.begin()
        try:
            abort_dead_recipe(recipe_id)
            session.commit()
        except exceptions.Exception, e:
            log.exception('Error in abort_dead_recipe(%s)', recipe_id)
            session.rollback()
        finally:
            session.close()
Example #9
 def compatible_with(cls, distro=None, osmajor=None):  #pylint: disable=E0213
     if distro:
         return not_(
             Task.excluded_osmajors.any(
                 OSMajor.id == distro.osversion.osmajor.id))
     if osmajor:
         return not_(Task.excluded_osmajors.any(OSMajor.id == osmajor.id))
     return true()
Example #10
 def resume_work(self,context):
     """
     on resume setting value from task context
     """
     execution_context=context["execution_context"]
     WRK_LOGGER.debug("RESUMING WORKER for :"+self.worker )
     if execution_context:
         self.start_time=execution_context.get("start_time",datetime.utcnow())
         self.worker_ids=execution_context.get("worker_ids",[])
         self.sp_list=execution_context.get("sp_list",[])
         ##validate all the worker ids are taken care of
         ets = DBSession.query(EntityTasks).filter(EntityTasks.worker==self.worker).\
                                 filter(not_(EntityTasks.worker_id.in_(self.worker_ids))).all()
         if len(ets) > 0:
             xtra_work_ids = [et.worker_id for et in ets]
             WRK_LOGGER.error("GOT ENT Tasks different from execution_context :"+self.worker+\
             ": CONTEXT WORKERS : "+str(self.worker_ids) +": XTRA WORKERS :"+str(xtra_work_ids))
             r = DBSession.query(EntityTasks.entity_id).\
                     filter(EntityTasks.worker_id.in_(xtra_work_ids)).\
                     filter(EntityTasks.worker==self.worker).\
                     update(values=dict(worker_id=None,finished=True,end_time=datetime.utcnow()))
             transaction.commit()
             WRK_LOGGER.debug("Cleaned Up entity_tasks . worker:rows : "+self.worker+":"+str(r))
             
     WRK_LOGGER.debug("RESUMING WORKER for :"+self.worker+":"+str(self.start_time)+":"+str(self.worker_ids) )
     self.do_work()
Example #11
 def _Not(self, node, fieldname):
     expressions = []
     self.in_not = True
     for subnode in node:
         expressions.append(self._eval(subnode, fieldname))
     self.in_not = False
     return not_(*expressions)
Example #12
def db_update_fieldoptions(session, field_id, options, language):
    """
    Transaction to update a set of options at once

    :param session: An ORM session
    :param field_id: The field on which the options are set
    :param options: The list of options to be updated
    :param language: The language of the request
    """
    options_ids = [
        db_update_fieldoption(session, field_id, option['id'], option,
                              language, idx)
        for idx, option in enumerate(options)
    ]

    if not options_ids:
        return

    subquery = session.query(models.FieldOption.id) \
                      .filter(models.FieldOption.field_id == field_id,
                              not_(models.FieldOption.id.in_(options_ids))) \
                      .subquery()

    session.query(models.FieldOption).filter(
        models.FieldOption.id.in_(subquery)).delete(synchronize_session=False)
Example #13
    def _fetch_db(result):
        LOGGER.debug('Open aggregate for item #%d: %r', item_id, result)
        # If the info was found in the cache, use that value.
        if result is not None:
            # The value 0 is used instead of None in the cache;
            # convert it back here.
            if not result:
                return None
            return result

        # Otherwise, fetch the information from the database...
        state_ok = StateName.statename_to_value('OK')
        state_up = StateName.statename_to_value('UP')
        aggregate = database.run(
            DBSession.query(
                CorrEvent.idcorrevent
            ).join(
                (Event, CorrEvent.idcause == Event.idevent)
            ).filter(
                # Here, the acknowledgement state is not taken into account:
                # an alert is never aggregated into an OK/UP aggregate
                # (see ticket #1027 for more information).
                not_(Event.current_state.in_([state_ok, state_up]))
            ).filter(Event.idsupitem == item_id
            ).scalar)

        # ...and update the cache before returning the ID.
        # NB: the value 0 is used instead of None so that the cache
        # can actually be used on the next call.
        ctx.setShared('open_aggr:%d' % item_id, aggregate or 0)
        return aggregate
Example #14
    def _delete_all_section(self, section):
        query = (self.session.query(Features).filter(
            Features.category == section))
        if section == 'general':
            query = query.filter(not_(Features.var_name.in_(PARKING_OPTIONS)))

        if section == 'featuremap':
            query = query.filter(
                not_(Features.var_name.in_(FUNC_KEY_FEATUREMAP_FOREIGN_KEY)))

        if section == 'applicationmap':
            query = query.filter(
                not_(Features.var_name.in_(
                    FUNC_KEY_APPLICATIONMAP_FOREIGN_KEY)))

        query.delete(synchronize_session=False)
Example #15
def provision_scheduled_recipesets(*args):
    """
    If all recipes in a recipe set are in the Scheduled state, move them to
    Running.
    """
    recipesets = RecipeSet.query.join(RecipeSet.job)\
            .filter(and_(Job.dirty_version == Job.clean_version, Job.deleted == None))\
            .filter(not_(RecipeSet.recipes.any(
                Recipe.status != TaskStatus.scheduled)))
    if not recipesets.count():
        return False
    log.debug("Entering provision_scheduled_recipesets")
    for rs_id, in recipesets.values(RecipeSet.id):
        log.info("scheduled_recipesets: RS:%s" % rs_id)
        session.begin()
        try:
            provision_scheduled_recipeset(rs_id)
            session.commit()
        except exceptions.Exception:
            log.exception('Error in provision_scheduled_recipeset(%s)', rs_id)
            session.rollback()
        finally:
            session.close()
    log.debug("Exiting provision_scheduled_recipesets")
    return True
Example #16
    def clear_data_for_current_system(self):
        p = alias(FileStorage)
        ps = alias(FileStorage)
        pl = alias(RpmDetailPatchStorageLink)
        pls = alias(RpmDetailPatchStorageLink)
        rd = alias(RpmDetail)
        s = alias(System)

        delete_links_sql = delete(pl).where(
            exists(
                select([1]).select_from(
                    pls.join(
                        rd, pls.c.rpm_detail_id == rd.c.rpm_detail_id).join(
                            s, rd.c.system_id == s.c.system_id)).where(
                                s.c.system_id == self.system.system_id).where(
                                    pl.c.id == pls.c.id)))

        delete_patches_sql = delete(p).where(
            not_(
                exists(
                    select([1]).select_from(
                        pl.join(ps, pl.c.file_storage_id == ps.c.id)).where(
                            p.c.id == ps.c.id))))

        result_links = self._session.execute(delete_links_sql)

        if result_links.rowcount:
            log.info(f"Removed {result_links.rowcount} previous patch links")

        result_patches = self._session.execute(delete_patches_sql)

        if result_patches.rowcount:
            log.info(f"Removed {result_patches.rowcount} previous patches")
Example #17
    def resume_work(self, context):
        """
        on resume setting value from task context
        """
        execution_context = context["execution_context"]
        WRK_LOGGER.debug("RESUMING WORKER for :" + self.worker)
        if execution_context:
            self.start_time = execution_context.get("start_time",
                                                    datetime.utcnow())
            self.worker_ids = execution_context.get("worker_ids", [])
            self.sp_list = execution_context.get("sp_list", [])
            ##validate all the worker ids are taken care of
            ets = DBSession.query(EntityTasks).filter(EntityTasks.worker==self.worker).\
                                    filter(not_(EntityTasks.worker_id.in_(self.worker_ids))).all()
            if len(ets) > 0:
                xtra_work_ids = [et.worker_id for et in ets]
                WRK_LOGGER.error("GOT ENT Tasks different from execution_context :"+self.worker+\
                ": CONTEXT WORKERS : "+str(self.worker_ids) +": XTRA WORKERS :"+str(xtra_work_ids))
                r = DBSession.query(EntityTasks.entity_id).\
                        filter(EntityTasks.worker_id.in_(xtra_work_ids)).\
                        filter(EntityTasks.worker==self.worker).\
                        update(values=dict(worker_id=None,finished=True,end_time=datetime.utcnow()))
                transaction.commit()
                WRK_LOGGER.debug("Cleaned Up entity_tasks . worker:rows : " +
                                 self.worker + ":" + str(r))

        WRK_LOGGER.debug("RESUMING WORKER for :" + self.worker + ":" +
                         str(self.start_time) + ":" + str(self.worker_ids))
        self.do_work()
Example #18
 def getActionsRoot(self, **options):
     '''
     @see: IActionManagerService.getActionsRoot
     '''
     sql = self.session().query(ActionMapped.Path).filter(
         not_(ActionMapped.Path.like('%.%')))
     return iterateCollection(sql, **options)
Example #19
def managegroups(request):
    params = {'action': 'manage_groups', 'form': None}
    form, session = None, DBSession
    is_a_post = request.method == 'POST'
    adding = 'groupname' in request.POST
    deleting = request.params.get('group_action', '') == 'delete'
    settings = request.registry.settings
    default_groups_names = [a[0] for a in get_default_groups(settings)]
    if is_a_post and deleting:
        items = [a[1] for a in request.POST.items() if a[0] == 'delete']

        todelete = session.query(AuthGroup).filter(
            se.and_(AuthGroup.id.in_(items),
                    se.not_(AuthGroup.name.in_(default_groups_names)))).all()
        noecho = [session.delete(i) for i in todelete]
        request.session.flash(
            _('Groups %s have been deleted') %
            (', '.join([a.name for a in todelete])), 'info')
    add_form = GroupForm(request.POST)
    if is_a_post and adding:
        if add_form.validate():
            try:
                group = add_form.save()
                add_form = GroupForm()
                flash(_('Added group : %s' % group.name, 'info'))
            except Exception, e:
                flash(_('Problem adding group : %s' % e, 'error'))
Example #20
def managegroups(request):
    params = {'action': 'manage_groups', 'form': None}
    form, session = None, DBSession
    is_a_post = request.method == 'POST'
    adding = 'groupname' in request.POST
    deleting = request.params.get('group_action', '') == 'delete'
    settings = request.registry.settings
    default_groups_names = [a[0] 
                            for a in get_default_groups(settings)] 
    if is_a_post and deleting:
        items = [a[1]
                 for a in request.POST.items()
                 if a[0] == 'delete']

        todelete = session.query( AuthGroup).filter(
            se.and_(
                AuthGroup.id.in_(items),
                se.not_(AuthGroup.name.in_(default_groups_names))
            )).all()
        noecho = [session.delete(i) for i in todelete]
        request.session.flash(_('Groups %s have been deleted') % (
            ', '.join([a.name for a in todelete])), 'info')
    add_form = GroupForm(request.POST)
    if is_a_post and adding:
        if add_form.validate():
            try:
                group = add_form.save()
                add_form = GroupForm()
                flash(_('Added group : %s' % group.name, 'info'))
            except Exception, e:
                flash(_('Problem adding group : %s' % e, 'error'))
Example #21
def provision_scheduled_recipesets(*args):
    """
    If all recipes in a recipe set are in the Scheduled state, move them to
    Running.
    """
    recipesets = RecipeSet.query.join(RecipeSet.job)\
            .filter(and_(Job.dirty_version == Job.clean_version, Job.deleted == None))\
            .filter(not_(RecipeSet.recipes.any(
                Recipe.status != TaskStatus.scheduled)))
    if not recipesets.count():
        return False
    log.debug("Entering provision_scheduled_recipesets")
    for rs_id, in recipesets.values(RecipeSet.id):
        log.info("scheduled_recipesets: RS:%s" % rs_id)
        session.begin()
        try:
            provision_scheduled_recipeset(rs_id)
            session.commit()
        except exceptions.Exception:
            log.exception('Error in provision_scheduled_recipeset(%s)', rs_id)
            session.rollback()
        finally:
            session.close()
    log.debug("Exiting provision_scheduled_recipesets")
    return True
Example #22
def db_update_fieldoptions(session, tid, field_id, options, language):
    """
    Update options

    :param session: the session on which to perform queries
    :param tid: the tenant ID
    :param field_id: the field_id to which the provided options are bound
    :param language: the language of the option definition dict
    """
    options_ids = [
        db_update_fieldoption(session, tid, field_id, option['id'], option,
                              language, idx)
        for idx, option in enumerate(options)
    ]

    if options_ids:
        ids = [
            x[0] for x in session.query(models.FieldOption.id).filter(
                models.FieldOption.field_id == field_id,
                not_(models.FieldOption.id.in_(
                    options_ids)), models.FieldOption.field_id ==
                models.Field.id, models.Field.tid == tid)
        ]
        if ids:
            session.query(models.FieldOption).filter(
                models.FieldOption.id.in_(ids)).delete(
                    synchronize_session='fetch')
Example #23
    def index(self):
        self.template = 'index.phtml'

        # top rated
        top_rated = Rating.top_rated(self.session, limit=6)
        self.view['top_rated'] = top_rated

        # last rated
        last_rated = self.view['last_rated']
        shuffle(last_rated)
        last_rated[5:] = []
        ids = map(itemgetter1, self.view['last_rated'])
        movies = self.session.query(Movie).filter(Movie.id.in_(ids)).all()
        movies = dict((m.id, m) for m in movies)
        self.view['movies'] = movies

        # recent users
        self.view['recent_users'] = User.recent(self.session, limit=8)

        # recent ratings
        already_shown = set(ids)
        already_shown.update((m.id for m, _ in top_rated))
        recent = self.session.query(Rating)\
                     .options(joinedload(Rating.movie))\
                     .options(joinedload(Rating.user))\
                     .filter(not_(Rating.movie_id.in_(already_shown)))\
                     .order_by(Rating.rated.desc()).limit(15).all()
        shuffle(recent)
        recent[10:] = []
        self.view['recent_ratings'] = recent
Example #24
 def getPotential(self,
                  blogId,
                  excludeSources=True,
                  offset=None,
                  limit=None,
                  detailed=True,
                  qu=None,
                  qs=None):
     '''
     @see: IBlogCollaboratorService.getPotential
     '''
     sqlBlog = self.session().query(BlogCollaboratorMapped.Id).filter(
         BlogCollaboratorMapped.Blog == blogId)
     sql = self.session().query(CollaboratorMapped)
     sql = sql.join(UserMapped, CollaboratorMapped.User == UserMapped.Id)
     sql = sql.join(SourceMapped,
                    SourceMapped.Id == CollaboratorMapped.Source)
     sql = sql.filter(not_(CollaboratorMapped.Id.in_(sqlBlog)))
     sql = sql.filter(UserMapped.Active == True)
     sql = sql.filter(UserMapped.Type == self.default_user_type_key)
     sql = sql.filter(SourceMapped.Name == self.internal_source_name)
     sql = sql.order_by(CollaboratorMapped.Name)
     if excludeSources: sql = sql.filter(CollaboratorMapped.User != None)
     if qu: sql = buildQuery(sql, qu, UserMapped)
     if qs: sql = buildQuery(sql, qs, SourceMapped)
     sqlLimit = buildLimits(sql, offset, limit)
     if detailed:
         return IterPart(sqlLimit.distinct(),
                         sql.distinct().count(), offset, limit)
     return sqlLimit.distinct()
Example #25
 def filter_func(q):
     q = q.join(RunScriptTable)
     q = q.filter(
         not_(
             RunScriptTable.Label.in_(
                 ['Blank Pipette 1', 'Blank Pipette 2'])))
     return q
Example #26
def get_available_venues( start, end, sitting=None ):
    """get all venues that are not booked for a sitting
    (but sitting if given)
    in the given time period 
    SQL:
    SELECT * 
    FROM venues 
    WHERE venues.venue_id NOT IN (SELECT sitting.venue_id 
        FROM sitting 
        WHERE (sitting.start_date BETWEEN '2000-01-01' AND '2000-01-02' 
            OR sitting.end_date BETWEEN '2000-01-01'  AND '2000-01-02'
            OR '2000-01-01'  BETWEEN sitting.start_date AND sitting.end_date 
            OR '2000-01-02'  BETWEEN sitting.start_date AND sitting.end_date) 
        AND sitting.venue_id IS NOT NULL)
    """
    session = Session()
    query = session.query(domain.Venue)
    b_filter = sql.and_(
        sql.or_( 
            sql.between(schema.sitting.c.start_date, start, end), 
            sql.between(schema.sitting.c.end_date, start, end),
            sql.between(start, schema.sitting.c.start_date, 
                schema.sitting.c.end_date),
            sql.between(end, schema.sitting.c.start_date, 
                schema.sitting.c.end_date)
        ),
        schema.sitting.c.venue_id != None)
    if sitting:
        if sitting.sitting_id:
            b_filter = sql.and_(b_filter,
                schema.sitting.c.sitting_id != sitting.sitting_id)
    query = query.filter(sql.not_(schema.venue.c.venue_id.in_(
                sql.select( [schema.sitting.c.venue_id] ).where(b_filter) )))
    venues = query.all()
    return venues
Example #27
 def roles(self, new_roles):
     db().query(Role).filter(not_(Role.name.in_(new_roles))).delete(synchronize_session="fetch")
     added_roles = self.roles
     for role in new_roles:
         if role not in added_roles:
             self.role_list.append(Role(name=role, release=self))
             added_roles.append(role)
Example #28
    def find(self, reservation_id=None, resource_id=None, slice_id=None, user_id=None, start_time=None, end_time=None):
        """
        Returns a list of reservation value objects (see class description).
        If all parameters are None, all reservations for this schedule_subject will be returned.
        If a given parameter is non-None, the result will be filtered by the respective field.
        If multiple params are given the result will be reduced (conditions will be AND-ed).
        If the times are given, all records which touch the given period will be returned.
        If {start_time} is given, but {end_time} is omitted, all records which span start_time will be returned.
        
        Limitations:
        - This method can not be used to filter records with NULL fields. E.g. it is not possible to filter all records to the ones which have set user_id to NULL.
        """
        q = db_session.query(ReservationRecord)
        q = q.filter_by(schedule_subject=self.schedule_subject)
        if not reservation_id is None:
            q = q.filter_by(reservation_id=reservation_id)
        if not resource_id is None:
            q = q.filter_by(resource_id=resource_id)
        if not slice_id is None:
            q = q.filter_by(slice_id=slice_id)
        if not user_id is None:
            q = q.filter_by(user_id=user_id)

        if (not start_time is None) and (end_time is None):
            end_time = start_time
        if (not start_time is None) and (not end_time is None):
            q = q.filter(not_(or_(ReservationRecord.end_time < start_time, ReservationRecord.start_time > end_time)))

        records = q.all()
        result = [self._convert_record_to_value_object(r) for r in records]
        db_session.expunge_all()
        return result
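
The time filter above relies on the usual interval-overlap identity: a record touches the requested period exactly when it does not end before the period starts and does not start after the period ends, which is what not_(or_(ReservationRecord.end_time < start_time, ReservationRecord.start_time > end_time)) expresses. A small sketch of the same predicate in plain Python, with made-up sample dates:

from datetime import datetime

def touches(record_start, record_end, start_time, end_time):
    # Same predicate as the query: NOT (record ends before the window
    # starts OR record starts after the window ends).
    return not (record_end < start_time or record_start > end_time)

window = (datetime(2024, 1, 10), datetime(2024, 1, 20))
records = [
    (datetime(2024, 1, 1), datetime(2024, 1, 5)),    # entirely before the window
    (datetime(2024, 1, 5), datetime(2024, 1, 12)),   # overlaps the window start
    (datetime(2024, 1, 15), datetime(2024, 1, 25)),  # overlaps the window end
]
print([touches(s, e, *window) for s, e in records])  # [False, True, True]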
Example #29
def questions():
    '''POST endpoint to get questions to play the quiz.
        - Request Arguments: category, previous questions
        - Returns: A random question
    '''
    try:
        req_body = json.loads(request.data)
        previous_questions = req_body.get('previous_questions')
        quiz_category = req_body.get('quiz_category')['id']
    except Exception:
        abort(400)

    queries = []
    if quiz_category != 0:
        queries.append(Question.category == quiz_category)
    queries.append(not_(Question.id.in_(previous_questions)))

    try:
        question = Question.query.filter(*queries).order_by(
            func.random()).first()
        data = {
            'question': question.format,
        }
    except AttributeError:
        data = {}
    except Exception:
        abort(500)
    finally:
        db.session.close()
    return generate_response(data=data)
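
For illustration, the endpoint above expects a JSON body with previous_questions (ids excluded from the draw via not_(Question.id.in_(...))) and a quiz_category object carrying an id, where 0 means no category filter. A hedged client-side sketch; the URL and the use of the requests library are assumptions, not part of the original app:

import requests  # assumed HTTP client; any client that can POST JSON works

payload = {
    'previous_questions': [4, 9],   # already-played question ids, excluded from the draw
    'quiz_category': {'id': 0},     # 0 = draw from all categories
}
resp = requests.post('http://localhost:5000/quizzes', json=payload)  # hypothetical route
print(resp.json())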
Example #30
 def constructQuery(self, context):
     session = Session()
     trusted = removeSecurityProxy(context)
     parliament_id = self._get_parliament_id(context)
     assigned_committee_ids = []
     if IGroupGroupItemAssignment.providedBy(trusted):
         committee_id = getattr(trusted, "group_id", None)
         if committee_id:
             query = session.query(domain.Committee).filter(
                 sql.and_(
                     domain.Committee.parent_group_id == parliament_id,
                     domain.Committee.group_id == committee_id
                 )
             )
             return query
         else:
             assigned_committee_ids = \
                 [ comm.group_id for comm in trusted.__parent__.values() ]
     else:
         assigned_committee_ids = \
             [ comm.group_id for comm in trusted.values() ]
     query = session.query(domain.Committee).filter(
         sql.and_(
             domain.Committee.status == 'active',
             domain.Committee.parent_group_id == parliament_id,
             sql.not_(
                 domain.Committee.group_id.in_(assigned_committee_ids)
             )
         )
     )
     return query
Example #31
 def constructQuery(self, context):
     session = Session()
     trusted = removeSecurityProxy(context)
     parliament_id = self._get_parliament_id(context)
     assigned_committee_ids = []
     if IGroupGroupItemAssignment.providedBy(trusted):
         committee_id = getattr(trusted, "group_id", None)
         if committee_id:
             query = session.query(domain.Committee).filter(
                 sql.and_(domain.Committee.parent_group_id == parliament_id,
                          domain.Committee.group_id == committee_id))
             return query
         else:
             assigned_committee_ids = \
                 [ comm.group_id for comm in trusted.__parent__.values() ]
     else:
         assigned_committee_ids = \
             [ comm.group_id for comm in trusted.values() ]
     query = session.query(domain.Committee).filter(
         sql.and_(
             domain.Committee.status == 'active',
             domain.Committee.parent_group_id == parliament_id,
             sql.not_(
                 domain.Committee.group_id.in_(assigned_committee_ids))))
     return query
Example #32
    def index(self):
        self.template = 'index.phtml'

        # top rated
        top_rated = Rating.top_rated(self.session, limit=6)
        self.view['top_rated'] = top_rated

        # last rated
        last_rated = self.view['last_rated']
        shuffle(last_rated)
        last_rated[5:] = []
        ids = map(itemgetter1, self.view['last_rated'])
        movies = self.session.query(Movie).filter(Movie.id.in_(ids)).all()
        movies = dict((m.id, m)
                      for m in movies)
        self.view['movies'] = movies

        # recent users
        self.view['recent_users'] = User.recent(self.session, limit=8)

        # recent ratings
        already_shown = set(ids)
        already_shown.update((m.id for m, _ in top_rated))
        recent = self.session.query(Rating)\
                     .options(joinedload(Rating.movie))\
                     .options(joinedload(Rating.user))\
                     .filter(not_(Rating.movie_id.in_(already_shown)))\
                     .order_by(Rating.rated.desc()).limit(15).all()
        shuffle(recent)
        recent[10:] = []
        self.view['recent_ratings'] = recent
Example #33
    def get_wall_events_query(self):
        user_is_admin_of_groups = [membership.group_id
                                   for membership in self.memberships
                                   if membership.membership_type == 'administrator']
        subjects = self.all_watched_subjects
        if self.is_teacher:
            subjects += self.taught_subjects
        from ututi.lib.wall import generic_events_query
        evts_generic = generic_events_query()

        t_evt = meta.metadata.tables['events']
        t_evt_comments = meta.metadata.tables['event_comments']
        t_wall_posts = meta.metadata.tables['wall_posts']
        t_content_items = meta.metadata.tables['content_items']
        subject_ids = [s.id for s in subjects]
        group_ids = [m.group.id for m in self.memberships]
        user_commented_evts_select = select([t_evt_comments.c.event_id],
                                            from_obj=[t_evt_comments.join(t_content_items,
                                                                          t_content_items.c.id == t_evt_comments.c.id)],)\
            .where(t_content_items.c.created_by == self.id)
        user_commented_evts = map(lambda r: r[0], meta.Session.execute(user_commented_evts_select).fetchall())

        query = evts_generic\
            .where(or_(or_(t_evt.c.object_id.in_(subject_ids),
                           t_wall_posts.c.subject_id.in_(subject_ids)) if subject_ids else False,  # subject wall posts
                       and_(or_(t_evt.c.author_id == self.id,  # location wall posts
                                # XXX User comments may grow to 1k-10k scale, consider a different implementation.
                                t_evt.c.id.in_(user_commented_evts) if user_commented_evts else False),
                            t_evt.c.event_type.in_(('subject_wall_post', 'location_wall_post'))),
                       or_(t_evt.c.object_id.in_(group_ids),) if group_ids else False))\
            .where(or_(t_evt.c.event_type != 'moderated_post_created',
                       t_evt.c.object_id.in_(user_is_admin_of_groups) if user_is_admin_of_groups else False))\
            .where(not_(t_evt.c.event_type.in_(self.ignored_events_list) if self.ignored_events_list else False))
        return query
Example #34
def gather_dataset_ids():
    '''Collects all dataset ids to reindex.'''
    package_obj_found = {}
    # pylint: disable=E1101
    # read orgs related to a harvest source
    subquery_harvest_orgs = model.Session.query(model.Group.id).distinct() \
        .join(model.Package, model.Package.owner_org == model.Group.id) \
        .join(harvest_model.HarvestSource, harvest_model.HarvestSource.id == model.Package.id) \
        .filter(model.Package.state == model.State.ACTIVE) \
        .filter(harvest_model.HarvestSource.active.is_(True)) \
        .filter(model.Group.state == model.State.ACTIVE) \
        .filter(model.Group.is_organization.is_(True)) \
        .subquery()

    # read all package IDs to reindex
    package_extra_alias = aliased(model.PackageExtra)

    query = model.Session.query(model.Package.id, model.Package.owner_org).distinct() \
        .outerjoin(model.PackageExtra, model.PackageExtra.package_id == model.Package.id) \
        .filter(model.Package.type != 'harvest') \
        .filter(model.Package.state == model.State.ACTIVE) \
        .filter(or_(model.Package.owner_org.notin_(subquery_harvest_orgs),
                    and_(model.Package.owner_org.in_(subquery_harvest_orgs),
                         not_(model.Session.query(model.Package.id)
                              .filter(and_(model.Package.id == package_extra_alias.package_id,
                                           package_extra_alias.state == model.State.ACTIVE,
                                           package_extra_alias.key == EXTRA_KEY_HARVESTED_PORTAL))
                              .exists()))))
    # pylint: enable=E1101

    for row in query:
        package_obj_found[row[0]] = row[1]

    return package_obj_found
Example #35
def user_index():
    search_form = SearchActivForm()
    choice_form = stateChoiceForm()
    if choice_form.validate_on_submit():
        types = choice_form.type.data
        state = User.query.filter(or_(User.state.like(types))).all()
        return render_template('admin/admin_list.html',
                               choice_form=choice_form,
                               search_form=search_form,
                               activities=state)
    if search_form.validate_on_submit():
        keywords = search_form.keywords.data
        # types = choice_form.type.data
        # state = User.query.filter(or_(User.state.like(types))).all()
        activities = User.query.filter(
            or_(User.username.like('%' + keywords + '%'), )).all()
        return render_template('admin/admin_list.html',
                               search_form=search_form,
                               activities=activities)
    page = request.args.get('page', 1, type=int)
    pagination = User.query.filter(not_(User.username == 'admin')).paginate(
        page,
        per_page=current_app.config['FLASY_NEWS_PER_PAGE'],
        error_out=False)
    user = pagination.items
    return render_template("admin/admin_list.html",
                           activities=user,
                           search_form=search_form,
                           pagination=pagination,
                           choice_form=choice_form)
Example #36
def adv_search_extension(q, include_extension_inputs,
                         exclude_extension_inputs):
    for extension in include_extension_inputs:
        q = q.filter(db.Books.data.any(db.Data.format == extension))
    for extension in exclude_extension_inputs:
        q = q.filter(not_(db.Books.data.any(db.Data.format == extension)))
    return q
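
The include/exclude pair above works because relationship.any(...) compiles to an EXISTS subquery against the related table, and wrapping it in not_() turns that into NOT EXISTS. A self-contained sketch of that compilation, using stand-in Book/Data declarative models rather than the db.Books/db.Data models referenced above:

from sqlalchemy import Column, ForeignKey, Integer, String, create_engine, not_
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship, sessionmaker

Base = declarative_base()

class Book(Base):
    __tablename__ = 'books'
    id = Column(Integer, primary_key=True)
    data = relationship('Data', backref='book')

class Data(Base):
    __tablename__ = 'data'
    id = Column(Integer, primary_key=True)
    book_id = Column(Integer, ForeignKey('books.id'))
    format = Column(String)

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()

# any(...) renders an EXISTS subquery; not_() turns it into NOT EXISTS.
q = session.query(Book).filter(not_(Book.data.any(Data.format == 'EPUB')))
print(q)  # emitted SQL contains: NOT (EXISTS (SELECT 1 FROM data WHERE ...))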
Example #37
def get_available_venues( start, end, sitting=None ):
    """get all venues that are not booked for a sitting
    (but sitting if given)
    in the given time period 
    SQL:
    SELECT * 
    FROM venues 
    WHERE venues.venue_id NOT IN (SELECT sitting.venue_id 
        FROM sitting 
        WHERE (sitting.start_date BETWEEN '2000-01-01' AND '2000-01-02' 
            OR sitting.end_date BETWEEN '2000-01-01'  AND '2000-01-02'
            OR '2000-01-01'  BETWEEN sitting.start_date AND sitting.end_date 
            OR '2000-01-02'  BETWEEN sitting.start_date AND sitting.end_date) 
        AND sitting.venue_id IS NOT NULL)
    """
    session = Session()
    query = session.query(domain.Venue)
    b_filter = sql.and_(
        sql.or_( 
            sql.between(schema.sitting.c.start_date, start, end), 
            sql.between(schema.sitting.c.end_date, start, end),
            sql.between(start, schema.sitting.c.start_date, 
                schema.sitting.c.end_date),
            sql.between(end, schema.sitting.c.start_date, 
                schema.sitting.c.end_date)
        ),
        schema.sitting.c.venue_id != None)
    if sitting:
        if sitting.sitting_id:
            b_filter = sql.and_(b_filter,
                schema.sitting.c.sitting_id != sitting.sitting_id)
    query = query.filter(sql.not_(schema.venue.c.venue_id.in_(
                sql.select( [schema.sitting.c.venue_id] ).where(b_filter) )))
    venues = query.all()
    return venues
Example #38
def user_view(user_id):
  user = User.query.get(user_id)
  tab = request.args.get("tab", "activity")

  e = Env(label=user.name, user=user)
  e.bread_crumbs = make_bread_crumbs()
  e.active_tab_id = tab
  e.tabs = make_tabs(user)

  if tab == "conversations":
    pass

  elif tab == "activity":
    # XXX quick & dirty
    e.activity_entries =\
      ActivityEntry.query.filter(ActivityEntry.actor_id == user.id).limit(30).all()

  elif tab == "audit":
    # XXX quick & dirty
    e.audit_entries =\
      AuditEntry.query.filter(AuditEntry.user_id == user.id).limit(30).all()

  elif tab in ("documents", "images"):
    files = File.query.filter(File.owner_id == user_id)
    if tab == "documents":
      files = files.filter(not_(File.mime_type.like("image/%")))
      e.documents = files.all()
    elif tab == "images":
      files = files.filter(File.mime_type.like("image/%"))
      e.images = files.all()

  return render_template("social/user.html", **e)
Example #39
 def getPotential(self,
                  blogId,
                  excludeSources=True,
                  offset=None,
                  limit=None,
                  detailed=True,
                  qu=None,
                  qs=None):
     '''
     @see: IBlogCollaboratorService.getPotential
     '''
     sqlBlog = self.session().query(BlogCollaboratorMapped.Id).filter(
         BlogCollaboratorMapped.Blog == blogId)
     sql = self.session().query(CollaboratorMapped).join(UserMapped).join(
         SourceMapped)
     sql = sql.filter(not_(CollaboratorMapped.Id.in_(sqlBlog)))
     sql = sql.filter(UserMapped.DeletedOn == None)
     sql = sql.order_by(CollaboratorMapped.Name)
     if excludeSources: sql = sql.filter(CollaboratorMapped.User != None)
     if qu: sql = buildQuery(sql, qu, UserMapped)
     if qs: sql = buildQuery(sql, qs, SourceMapped)
     sqlLimit = buildLimits(sql, offset, limit)
     if detailed:
         return IterPart(sqlLimit.all(), sql.count(), offset, limit)
     return sql.all()
Example #40
    def build_clause(self, spec):
        """
        Construct SQLAlchemy binary expression filter clause from the
        given filter specification.

        Parameters
        ----------
        spec : FilterSpec
            Filter specification for which to construct filter clause

        Returns
        -------
        tuple
            Tuple of the filter binary expression clause and a list of
            model attributes/descriptors which should be joined when
            filtering. If these attributes are not joined, SQLAlchemy
            will not join the appropriate tables and hence won't be
            able to filter the data correctly.
        """
        to_join = []

        model = self.model
        for component in spec.components:
            _field = getattr(model, component)
            field = self._get_properties_for_model(model)[component]
            try:
                model = self._get_related_model_for_field(field)
            except AttributeError:
                break
            else:
                to_join.append(_field)

        # can't directly compare instrumented attributes,
        # so collect object ids instead, which are unique
        # since the attributes are model singletons
        existing_eagerloads_ids = []
        for option in self.queryset._with_options:
            for suboption in itertools.takewhile(
                    lambda i: i.strategy[0] == ("lazy", "joined"),
                    option._to_bind):
                existing_eagerloads_ids.append([id(i) for i in suboption.path])

        already_joined_ids = set()
        for i in range(1, len(to_join) + 1):
            sub_to_join_ids = [id(i) for i in to_join[:i]]
            if sub_to_join_ids in existing_eagerloads_ids:
                already_joined_ids |= set(sub_to_join_ids)

        n_already_joined = len(already_joined_ids)
        to_join = to_join[n_already_joined:]

        builder = getattr(self, "_build_clause_{}".format(spec.lookup))
        column = self._get_attribute_for_field(field)
        clause = builder(spec, column)

        if spec.is_negated:
            clause = not_(clause)

        return clause, to_join
Example #41
def history():
    streques = (models.Streque.query
                .filter(not_(models.Streque.too_old()),
                        models.Streque.voided.is_(False))
                .order_by(models.Streque.timestamp.desc())
                .all())

    return flask.render_template('history.html', streques=streques)
Example #42
 def roles(self, new_roles):
     db().query(Role).filter(not_(Role.name.in_(new_roles))).filter(
         Role.release_id == self.id).delete(synchronize_session='fetch')
     added_roles = self.roles
     for role in new_roles:
         if role not in added_roles:
             self.role_list.append(Role(name=role, release=self))
             added_roles.append(role)
Example #43
 def get_unsolved_challenges(self):
     """
     Return a query that produces a list of all unsolved challenges for a
     given team.
     """
     team_solved_subquery = get_team_solved_subquery(self.id)
     online = get_online_challenges()
     return online.filter(not_(team_solved_subquery))
Example #44
def queue_processed_recipesets(*args):
    work_done = False
    recipesets = RecipeSet.query.join(RecipeSet.job)\
            .filter(not_(Job.is_deleted))\
            .filter(not_(RecipeSet.recipes.any(
                Recipe.status != TaskStatus.processed)))\
            .order_by(RecipeSet.priority.desc())\
            .order_by(RecipeSet.id)
    for rs_id, in recipesets.values(RecipeSet.id):
        session.begin()
        try:
            queue_processed_recipeset(rs_id)
            session.commit()
        except Exception, e:
            log.exception('Error in queue_processed_recipeset(%s)', rs_id)
            session.rollback()
        finally:
            session.close()
Example #45
    def search_moc(self, facility_id, public, moc_id=None, state=None,
                   with_description=False, order_by_date=False):
        """
        Search for MOC records for a facility.
        """

        select_cols = [
            moc.c.id,
            moc.c.facility_id,
            moc.c.name,
            moc.c.public,
            moc.c.uploaded,
            moc.c.num_cells,
            moc.c.area,
            moc.c.state,
        ]

        if with_description:
            select_cols.append(moc.c.description)
            select_cols.append(moc.c.description_format)
            default = {}
        else:
            default = {'description': None, 'description_format': None}

        stmt = select(select_cols)

        if facility_id is not None:
            stmt = stmt.where(moc.c.facility_id == facility_id)

        if public is not None:
            if public:
                stmt = stmt.where(moc.c.public)
            else:
                stmt = stmt.where(not_(moc.c.public))

        if moc_id is not None:
            stmt = stmt.where(moc.c.id == moc_id)

        if state is not None:
            if is_list_like(state):
                stmt = stmt.where(moc.c.state.in_(state))
            else:
                stmt = stmt.where(moc.c.state == state)

        if order_by_date:
            stmt = stmt.order_by(moc.c.uploaded.desc())
        else:
            stmt = stmt.order_by(moc.c.id.asc())

        ans = ResultCollection()

        with self._transaction() as conn:
            for row in conn.execute(stmt):
                values = default.copy()
                values.update(**row)
                ans[row['id']] = MOCInfo(**values)

        return ans
Example #46
def check_field_association(session, tid, field_dict):
    if field_dict.get('fieldgroup_id', '') and session.query(models.Field).filter(models.Field.id == field_dict['fieldgroup_id'],
                                                                                  models.Field.tid != tid).count():
        raise errors.InputValidationError()

    if field_dict.get('template_id', '') and session.query(models.Field).filter(models.Field.id == field_dict['template_id'],
                                                                                not_(models.Field.tid.in_(set([1, tid])))).count():
        raise errors.InputValidationError()

    if field_dict.get('step_id', '') and session.query(models.Field).filter(models.Step.id == field_dict['step_id'],
                                                                            models.Questionnaire.id == models.Step.questionnaire_id,
                                                                            not_(models.Questionnaire.tid.in_(set([1, tid])))).count():
        raise errors.InputValidationError()

    if field_dict.get('fieldgroup_id', ''):
        ancestors = set(fieldtree_ancestors(session, field_dict['fieldgroup_id']))
        if field_dict['id'] == field_dict['fieldgroup_id'] or field_dict['id'] in ancestors:
            raise errors.InputValidationError("Provided field association would cause recursion loop")
Example #47
 def _op1(self, values, op, elem):
     elem = self._walk(values, elem)
     if op == "+":
         return + elem
     elif op == "-":
         return - elem
     elif op == "not":
         return expr.not_(elem)
     assert False, "should not happen"
Example #48
    def _makeQueryFilter(self):
        # Query filter has already been generated.
        if self._queryFilter is not None:
            return self._queryFilter

        # There's no filter to apply.
        if self._sqlFilterConditionListDict is None:
            return None

        queryFilterList = []

        for key, sqlFilterConditionList in self._sqlFilterConditionListDict.items(
        ):
            variableName, negate = key
            variable = getattr(SQLModsecurityAuditEntryMessage, variableName)

            # If it's a negation, we create one 'NOT IN' query filter for all values.
            if negate:
                with closing(self._sessionMaker()) as session:
                    # Now we store the filter and its variables in the database before using them through sub queries.
                    sqlFilterObject = SQLFilter(
                        conditionList=sqlFilterConditionList)
                    session.add(sqlFilterObject)
                    session.commit()

                    # Crawling group items.

                    # @hack: why is this line necessary ?
                    sqlFilterObject = session.query(SQLFilter).filter(
                        SQLFilter.id == sqlFilterObject.id).one()

                    # Making a 'NOT IN' filter with not null values.
                    sqlFilterVariableValueIterable = session.query(SQLFilterCondition.variableValue)\
                                                            .with_parent(sqlFilterObject)\
                                                            .filter(SQLFilterCondition.variableValue != None)
                    queryFilter = not_(
                        variable.in_(sqlFilterVariableValueIterable))

                    # @hack: managing NULL values as they are ignored by 'IN' and 'NOT IN'.
                    # Making a 'IS NOT NULL' filter if there's a null value.
                    if session.query(SQLFilterCondition.variableValue)\
                              .with_parent(sqlFilterObject)\
                              .filter(SQLFilterCondition.variableValue == None)\
                              .count() > 0:
                        queryFilter = and_(queryFilter, variable != None)
                    else:
                        queryFilter = or_(queryFilter, variable == None)
                    queryFilterList.append(queryFilter)

            # We create a filter for each value if it's an equality condition.
            else:
                for sqlFilterCondition in sqlFilterConditionList:
                    queryFilterList.append(
                        variable == sqlFilterCondition.variableValue)

        self._queryFilter = and_(*queryFilterList)
        return self._queryFilter
Example #49
            def filter_missing(q_self, kw):
                """Return a Query object that restricts to dictionaries
                NOT containing the given keyword"""
                q = q_self._query
                T = h_self._Dict

                q = q.filter(not_(T._attrs.any(name=kw)))

                return h_self._Query(q)
Example #50
 def get_random(user: '******') -> 'Optional[LearningDatapoint]':
     return LearningDatapoint.query.order_by(
         expression.func.random()).filter(
             expression.not_(
                 LearningDatapointSubmission.query.filter(
                     LearningDatapoint.id ==
                     LearningDatapointSubmission.datapoint_id,
                     LearningDatapointSubmission.user_id ==
                     user.id).exists())).first()
Example #51
 def check_vm_name(self, vm_name, vm_id):
     from convirt.model import DBSession
     from convirt.model.VM import VM
     query = DBSession.query(VM)
     if vm_id:
         query = query.filter(not_(VM.id==vm_id))
     vm = query.filter(VM.name==vm_name).first()
     if vm :
         return dict(success=False,msg='VM <b>'+vm_name+'</b> already exists.')
     return dict(success=True,msg='')
Example #52
def db_update_fieldattrs(session, field_id, field_attrs, language):
    attrs_ids = [db_update_fieldattr(session, field_id, attr_name, attr, language) for attr_name, attr in field_attrs.items()]

    if not attrs_ids:
        return

    to_remove = session.query(models.FieldAttr.id).filter(models.FieldAttr.field_id == field_id,
                                                                  not_(models.FieldAttr.id.in_(attrs_ids)))

    session.query(models.FieldAttr).filter(models.FieldAttr.id.in_(to_remove.subquery())).delete(synchronize_session='fetch')
Example #53
File: DAO.py Project: joubu/CDL
 def downloads(cls):
     try:
         from modele import Download, Config
         url_blacklist = [e.url for e in 
                 session.query(Config).first().blacklist]
                     
         return session.query(Download).filter(
                 not_(Download.url.in_(url_blacklist))).all()
     except Exception, e:
         return None
Example #54
def db_update_fieldoptions(session, field_id, options, language):
    options_ids = [db_update_fieldoption(session, field_id, option['id'], option, language, idx) for idx, option in enumerate(options)]

    if not options_ids:
        return

    to_remove = session.query(models.FieldOption.id).filter(models.FieldOption.field_id == field_id,
                                                            not_(models.FieldOption.id.in_(options_ids)))

    session.query(models.FieldOption).filter(models.FieldOption.id.in_(to_remove.subquery())).delete(synchronize_session='fetch')
Example #55
    def list(self, status = ['active'], limit_offset = None, starts_with = None, search = None):

        db = get_session()

        # Make a list from string
        if not isinstance(status, (list, tuple)):
            status = [status]


        q = db.query(Movie) \
            .join(Movie.library, Library.titles) \
            .options(joinedload_all('releases.status')) \
            .options(joinedload_all('releases.quality')) \
            .options(joinedload_all('releases.files')) \
            .options(joinedload_all('releases.info')) \
            .options(joinedload_all('library.titles')) \
            .options(joinedload_all('library.files')) \
            .options(joinedload_all('status')) \
            .options(joinedload_all('files')) \
            .filter(LibraryTitle.default == True) \
            .filter(or_(*[Movie.status.has(identifier = s) for s in status]))

        filter_or = []
        if starts_with:
            starts_with = toUnicode(starts_with.lower())
            if starts_with in ascii_lowercase:
                filter_or.append(LibraryTitle.simple_title.startswith(starts_with))
            else:
                ignore = []
                for letter in ascii_lowercase:
                    ignore.append(LibraryTitle.simple_title.startswith(toUnicode(letter)))
                filter_or.append(not_(or_(*ignore)))

        if search:
            filter_or.append(LibraryTitle.simple_title.like('%%' + search + '%%'))

        if filter_or:
            q = q.filter(or_(*filter_or))

        q = q.order_by(asc(LibraryTitle.simple_title))

        if limit_offset:
            splt = limit_offset.split(',')
            limit = splt[0]
            offset = 0 if len(splt) == 1 else splt[1]
            q = q.limit(limit).offset(offset)

        results = q.all()

        movies = []
        for movie in results:
            temp = movie.to_dict(self.default_dict)
            movies.append(temp)

        return movies
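
In the starts_with branch above, the "#"-style bucket (titles that do not begin with a letter) is built by negating a disjunction of startswith() filters. A minimal sketch of just that expression, against a hypothetical titles table:

from string import ascii_lowercase

from sqlalchemy import Column, Integer, MetaData, String, Table, not_, or_

metadata = MetaData()
titles = Table(
    "titles", metadata,
    Column("id", Integer, primary_key=True),
    Column("simple_title", String),
)

# True when the title starts with any ASCII letter...
starts_with_letter = or_(*[titles.c.simple_title.startswith(c) for c in ascii_lowercase])

# ...so its negation selects titles starting with a digit or symbol.
non_alpha_bucket = not_(starts_with_letter)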
Example No. 56
 def constructQuery(self, context):
     session= Session()
     trusted=removeSecurityProxy(context)
     parliament_id = self._get_parliament_id(trusted)
     mp_user_ids = sql.select([schema.user_group_memberships.c.user_id], 
         schema.user_group_memberships.c.group_id == parliament_id)
     query = session.query(domain.User).filter(sql.and_(
         sql.not_(domain.User.user_id.in_(mp_user_ids)),
         domain.User.active_p == 'A')).order_by(
             domain.User.last_name, domain.User.first_name)
     return query
Example No. 57
    def __init__(self, course_hedu_id, bra_id):
        Major.__init__(self, course_hedu_id, bra_id)
        self.course_hedu_id = course_hedu_id

        self.max_year_query = db.session.query(
            func.max(Ybc_hedu.year)).filter_by(course_hedu_id=course_hedu_id)

        if bra_id == '':
            self.hedu_query = Ybc_hedu.query.filter(
                Ybc_hedu.course_hedu_id == self.course_hedu_id,
                Ybc_hedu.year == self.max_year_query,
                not_(Ybc_hedu.bra_id.like('0xx%')),
                func.length(Ybc_hedu.bra_id) == 9)
        else:
            self.hedu_query = Ybc_hedu.query.filter(
                Ybc_hedu.course_hedu_id == self.course_hedu_id,
                Ybc_hedu.year == self.max_year_query,
                Ybc_hedu.bra_id.like(self.bra_id+'%'),
                not_(Ybc_hedu.bra_id.like('0xx%')),
                func.length(Ybc_hedu.bra_id) == 9)
Example No. 58
def test_not_clause():
    raw_clause = {
        'op': 'not',
        'clause': {
                'col': 'project_name',
                'op': 'eq',
                'other': 'quijibo',
        },
    }
    expected_clause = not_(proj.c.project_name == 'quijibo')
    clause_eq(raw_clause, expected_clause)
Example No. 59
    def _makeQueryFilter(self):
        # Query filter has already been generated.
        if self._queryFilter is not None:
            return self._queryFilter
        
        # There's no filter to apply.
        if self._sqlFilterConditionListDict is None:
            return None
        
        queryFilterList = []
        
        for key, sqlFilterConditionList in self._sqlFilterConditionListDict.items():
            variableName, negate = key
            variable = getattr(SQLModsecurityAuditEntryMessage, variableName)

            # If it's a negation, we create one 'NOT IN' query filter for all values.
            if negate:
                with closing(self._sessionMaker()) as session:
                    # Now we store the filter and its variables in the database before using them through subqueries.
                    sqlFilterObject = SQLFilter(conditionList = sqlFilterConditionList)
                    session.add(sqlFilterObject)
                    session.commit()
    
                    # Crawling group items.
                    
                    # @hack: why is this line necessary?
                    sqlFilterObject = session.query(SQLFilter).filter(SQLFilter.id == sqlFilterObject.id).one()
                    
                    # Making a 'NOT IN' filter with the non-null values.
                    sqlFilterVariableValueIterable = session.query(SQLFilterCondition.variableValue)\
                                                            .with_parent(sqlFilterObject)\
                                                            .filter(SQLFilterCondition.variableValue != None)
                    queryFilter = not_(variable.in_(sqlFilterVariableValueIterable))
                    
                    # @hack: managing NULL values as they are ignored by 'IN' and 'NOT IN'.
                    # Making an 'IS NOT NULL' filter if there's a null value.
                    if session.query(SQLFilterCondition.variableValue)\
                              .with_parent(sqlFilterObject)\
                              .filter(SQLFilterCondition.variableValue == None)\
                              .count() > 0:
                        queryFilter = and_(queryFilter, variable != None)
                    else:
                        queryFilter = or_(queryFilter, variable == None)
                    queryFilterList.append(queryFilter)
                    

            # We create a filter for each value if it's an equality condition.
            else:
                for sqlFilterCondition in sqlFilterConditionList:
                    queryFilterList.append(variable == sqlFilterCondition.variableValue)

        self._queryFilter = and_(*queryFilterList)
        return self._queryFilter
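
The extra IS NULL / IS NOT NULL handling above is needed because of SQL three-valued logic: x NOT IN (...) is never true when x is NULL, so NULL rows silently disappear from a plain NOT IN filter unless they are handled explicitly. A minimal sketch of both variants, with a hypothetical messages table (the == None / != None comparisons compile to IS NULL / IS NOT NULL, matching the style used in the example above):

from sqlalchemy import Column, Integer, MetaData, String, Table, and_, not_, or_

metadata = MetaData()
messages = Table(
    "messages", metadata,
    Column("id", Integer, primary_key=True),
    Column("client", String, nullable=True),
)

excluded = ["curl", "wget"]

# Exclude the listed values but keep rows whose client IS NULL.
keep_nulls = or_(not_(messages.c.client.in_(excluded)), messages.c.client == None)

# Exclude the listed values and also drop rows whose client IS NULL.
drop_nulls = and_(not_(messages.c.client.in_(excluded)), messages.c.client != None)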
Example No. 60
 def index(self, **kw):
     self.col_call = 0
     self.max_cols = 0
     self.job_ids = []
     matrix_options = {}
     matrix_options['whiteboard_options'] = self.get_whiteboard_options(kw.get('filter'), selected=kw.get('whiteboard'))
     if ('job_ids' in kw or 'whiteboard' in kw):
         job_ids = []
         if 'job_ids' in kw:
             job_ids = [int(j) for j in kw['job_ids'].split()]
             # Filter out ids of deleted jobs
             query = model.Job.query.filter(not_(model.Job.is_deleted)).filter(model.Job.id.in_(job_ids))
             job_ids = [job_id for job_id, in query.values(model.Job.id)]
         # Build the result grid
         gen_results = self.generate(whiteboard=kw.get('whiteboard'),
             job_ids=job_ids, toggle_nacks=kw.get('toggle_nacks_on'))
         matrix_options['grid'] = gen_results['grid']
         matrix_options['list'] = gen_results['data']
         if 'whiteboard' in kw: # Getting results by whiteboard
             jobs = model.Job.by_whiteboard(kw.get('whiteboard')).filter(not_(model.Job.is_deleted))
             job_count = jobs.count()
             if job_count > model.Job.max_by_whiteboard:
                 flash(_('Your whiteboard contains %s jobs, only %s will be used') % (job_count, model.Job.max_by_whiteboard))
             jobs = jobs.limit(model.Job.max_by_whiteboard)
             job_ids = [str(j.id) for j in jobs]
             self.job_ids = job_ids
             matrix_options['job_ids_vals'] = "\n".join([str(j) for j in job_ids])
         elif job_ids: #Getting results by job id
             self.job_ids = job_ids
             matrix_options['job_ids_vals'] = '\n'.join([str(j) for j in job_ids])
         if 'toggle_nacks_on' in kw:
             matrix_options['toggle_nacks_on'] = True
         else:
             matrix_options['toggle_nacks_on'] = False
     else:
         matrix_options['toggle_nacks_on'] = False
         matrix_options['grid'] = None
    
     return dict(widget=self.job_matrix_widget, widget_options=matrix_options,
             title="Job Matrix Report", value=None, widget_attrs={})