Example #1
    def base_query(self, query):
        query = query.options(joinedload(Species.categories), joinedload(Parameter._files))
        if self.languages:
            for i, lang in enumerate(self.languages):
                query = query.outerjoin(
                    self._langs[i],
                    and_(self._langs[i].language_pk == lang.pk,
                         self._langs[i].parameter_pk == Parameter.pk))

            query = query \
                .filter(or_(*[
                    self._langs[i].pk != null() for i in range(len(self._langs))]))\
                .outerjoin(SpeciesCategory, SpeciesCategory.species_pk == Parameter.pk) \
                .outerjoin(
                    Category,
                    and_(
                        SpeciesCategory.category_pk == Category.pk,
                        Category.language_pk.in_([l.pk for l in self.languages])))\
                .options(joinedload_all(Parameter.valuesets, ValueSet.values))
        else:
            query = query\
                .outerjoin(SpeciesCategory, SpeciesCategory.species_pk == Parameter.pk) \
                .outerjoin(Category,
                           and_(SpeciesCategory.category_pk == Category.pk,
                                Category.language_pk == null()))
        return query.distinct()
Example #2
def glottologmeta(request):
    q = DBSession.query(Languoid)\
        .filter(Language.active == true())\
        .filter(Languoid.status.in_(
            (LanguoidStatus.established, LanguoidStatus.unattested)))
    qt = q.filter(Languoid.father_pk == null())
    res = {
        'last_update': DBSession.query(Language.updated)
        .order_by(Language.updated.desc()).first()[0],
        'number_of_families': qt.filter(Languoid.level == LanguoidLevel.family).count(),
        'number_of_isolates': qt.filter(Languoid.level == LanguoidLevel.language).count(),
    }
    ql = q.filter(Languoid.hid != null())
    res['number_of_languages'] = {'all': ql.count()}
    res['special_families'] = OrderedDict()
    for name in SPECIAL_FAMILIES:
        l = qt.filter(Language.name == name).one()
        res['special_families'][name] = l
        res['number_of_languages'][name] = l.child_language_count

    res['number_of_languages']['l1'] = res['number_of_languages']['all'] \
        - res['number_of_languages']['Pidgin']\
        - res['number_of_languages']['Artificial Language']\
        - res['number_of_languages']['Sign Language']
    return res
Example #3
    def test_isnull_notnull(self):
        sat = self.sa_alltypes
        sd = sat.c.double_col
        d = self.alltypes.double_col

        cases = [(d.isnull(), sd.is_(sa.null())), (d.notnull(), sd.isnot(sa.null()))]
        self._check_expr_cases(cases)
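The pair above compiles to the standard SQL predicates. A minimal, self-contained sketch of the same operators (table and column names here are illustrative, not taken from the test suite):

import sqlalchemy as sa

t = sa.table('t', sa.column('double_col'))

print(t.c.double_col.is_(sa.null()))     # t.double_col IS NULL
print(t.c.double_col.isnot(sa.null()))   # t.double_col IS NOT NULL
# Comparing against None is coerced to the same SQL:
print(t.c.double_col == None)            # t.double_col IS NULL  # noqa: E711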
Example #4
    def group(self, group, current=None):
        # Filter by group
        sub_query = db.session.query(GroupPatient)
        sub_query = sub_query.filter(GroupPatient.group == group, GroupPatient.patient_id == Patient.id)

        if current is not None:
            if current:
                # Between from and to date
                sub_query = sub_query.filter(
                    GroupPatient.from_date <= func.now(),
                    or_(
                        GroupPatient.to_date == null(),
                        GroupPatient.to_date >= func.now(),
                    )
                )
            else:
                # Outside from or to date
                sub_query = sub_query.filter(or_(
                    GroupPatient.from_date > func.now(),
                    and_(
                        GroupPatient.to_date != null(),
                        GroupPatient.to_date < func.now()
                    )
                ))

        self.query = self.query.filter(sub_query.exists())

        return self
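The sub_query.exists() call turns the correlated query into an EXISTS (...) predicate, and a NULL to_date is read as an open-ended membership. A condensed sketch of just the "currently in group" branch, reusing the models from the example above:

from sqlalchemy import func, or_, null

currently_in_group = (
    db.session.query(GroupPatient)
    .filter(
        GroupPatient.group == group,
        GroupPatient.patient_id == Patient.id,  # correlates to the outer Patient query
        GroupPatient.from_date <= func.now(),
        # NULL to_date means "no end date", i.e. still a member
        or_(GroupPatient.to_date == null(),
            GroupPatient.to_date >= func.now()),
    )
)
patients = db.session.query(Patient).filter(currently_in_group.exists())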
Example #5
def delete_userless_histories(app, cutoff_time, info_only=False, force_retry=False):
    # Deletes userless histories whose update_time value is older than the cutoff_time.
    # The purge history script will handle marking DatasetInstances as deleted.
    # Nothing is removed from disk yet.
    history_count = 0
    start = time.time()
    if force_retry:
        histories = app.sa_session.query(app.model.History) \
                                  .filter(and_(app.model.History.table.c.user_id == null(),
                                               app.model.History.table.c.update_time < cutoff_time))
    else:
        histories = app.sa_session.query(app.model.History) \
                                  .filter(and_(app.model.History.table.c.user_id == null(),
                                               app.model.History.table.c.deleted == false(),
                                               app.model.History.table.c.update_time < cutoff_time))
    for history in histories:
        if not info_only:
            print("Deleting history id ", history.id)
            history.deleted = True
            app.sa_session.add(history)
            app.sa_session.flush()
        history_count += 1
    stop = time.time()
    print("Deleted %d histories" % history_count)
    print("Elapsed time: ", stop - start)
    print("##########################################")
Example #6
def build_partial_day_ips(query, period_start, period_end):
    """Method to build an IP list for the case 2

    when the IP was allocated after the period start and
    is still allocated after the period end.
    This method only looks at public IPv4 addresses.
    """
    # Filter out only IPv4 that were allocated after the period start
    # and have not been deallocated before the period end.
    # allocated_at will be set to a date
    ip_list = (
        query.filter(models.IPAddress.version == 4)
        .filter(models.IPAddress.network_id == PUBLIC_NETWORK_ID)
        # note: a plain Python "is not None" here would always be True and
        # never reach SQL; the != null() comparison renders IS NOT NULL
        .filter(models.IPAddress.used_by_tenant_id != null())
        .filter(
            and_(
                models.IPAddress.allocated_at != null(),
                models.IPAddress.allocated_at >= period_start,
                models.IPAddress.allocated_at < period_end,
            )
        )
        .filter(
            or_(
                # .is_(False) renders a SQL comparison; a Python "is False"
                # identity test would never reach the database
                models.IPAddress._deallocated.is_(False),
                models.IPAddress.deallocated_at == null(),
                models.IPAddress.deallocated_at >= period_end,
            )
        )
        .all()
    )

    return ip_list
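As the inline comments above note, Python identity tests like "col is not None" evaluate immediately to a plain boolean and never reach the database. Printing the compiled expression makes the difference obvious (illustrative table):

import sqlalchemy as sa

ip = sa.table('ip_addresses', sa.column('used_by_tenant_id'))

print(ip.c.used_by_tenant_id != sa.null())   # used_by_tenant_id IS NOT NULL
print(ip.c.used_by_tenant_id.isnot(None))    # same SQL, method form
print(ip.c.used_by_tenant_id is not None)    # True -- plain Python, no SQL at all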
Example #7
    def test_draft_creation(self):
        model.Recipe(
            type='MASH',
            name='Rocky Mountain River IPA',
            gallons=5,
            boil_minutes=60,
            notes=u'This is my favorite recipe.',
            state=u'PUBLISHED'
        )
        model.commit()

        model.Recipe.query.first().draft()
        model.commit()

        assert model.Recipe.query.count() == 2
        source = model.Recipe.query.filter(
            model.Recipe.published_version == null()
        ).first()  # noqa
        draft = model.Recipe.query.filter(
            model.Recipe.published_version != null()
        ).first()  # noqa

        assert source != draft
        assert source.type == draft.type == 'MASH'
        assert source.name == draft.name == 'Rocky Mountain River IPA'
        assert source.state != draft.state
        assert draft.state == 'DRAFT'

        assert draft.published_version == source
        assert source.current_draft == draft
Example #8
def upgrade():
    op.alter_column(
        'writeup_post_versions', 'threadpost_id',
        nullable=False)
    op.alter_column(
        'writeup_post_versions', 'writeuppost_id',
        nullable=True)
    op.alter_column(
        'writeup_post_versions', 'html',
        nullable=True)
    op.create_check_constraint(
        'writeup_post_versions_check_html', 'writeup_post_versions',
        sa.and_(sa.column('writeuppost_id') != sa.null(), sa.column('html') != sa.null()))
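SQLAlchemy rewrites != null() comparisons into IS NOT NULL, so the constraint body compiles to the expected SQL. A quick self-contained check (sketch):

import sqlalchemy as sa

expr = sa.and_(sa.column('writeuppost_id') != sa.null(),
               sa.column('html') != sa.null())
print(expr)  # writeuppost_id IS NOT NULL AND html IS NOT NULL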
Example #9
    def comments_for_video(cls, video_id, comment_ids=None):
        """Return comments for a video with commenter name & email."""
        video_comments = cls.query.filter_by(video_id=video_id)
        if comment_ids:
            video_comments = video_comments.filter(cls.id.in_(comment_ids))

        account_comments = video_comments.join(
            AccountUser,
            (VideoComment.user_type == 'account_user') &
            (VideoComment.user_id == AccountUser.id)
        ).with_entities(
            VideoComment,
            AccountUser.display_name.label('name'),
            AccountUser.username.label('email'),
            AccountUser.avatar_filename,
            AccountUser.account_id,
        )

        collab_comments_with_user = video_comments.join(
            VideoCollaborator,
            (VideoComment.user_type == 'collaborator') &
            (VideoComment.user_id == VideoCollaborator.id)
        ).join(
            AccountUser,
            AccountUser.id == VideoCollaborator.account_user_id
        ).with_entities(
            VideoComment,
            AccountUser.display_name.label('name'),
            AccountUser.username.label('email'),
            AccountUser.avatar_filename,
            AccountUser.account_id,
        )

        collab_comments = video_comments.join(
            VideoCollaborator,
            (VideoComment.user_type == 'collaborator') &
            (VideoComment.user_id == VideoCollaborator.id) &
            (VideoCollaborator.account_user_id.is_(None))
        ).with_entities(
            VideoComment,
            VideoCollaborator.name,
            VideoCollaborator.email,
            null().label('avatar_filename'),
            null().label('account_id'),
        )

        return account_comments.union_all(
            collab_comments_with_user, collab_comments).order_by(VideoComment.date_added)
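The third branch pads the collaborator rows with null().label(...) so that all three SELECTs have the same column shape and can be combined with UNION ALL. A minimal sketch of that padding trick (table names illustrative):

import sqlalchemy as sa

users = sa.table('users', sa.column('id'), sa.column('avatar'))
guests = sa.table('guests', sa.column('id'))

q = sa.union_all(
    sa.select([users.c.id, users.c.avatar]),
    # guests have no avatar column; pad with a labeled NULL
    sa.select([guests.c.id, sa.null().label('avatar')]),
)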
Example #10
def migrate_claims(migrate_engine, metadata, buildrequests, objects,
                   buildrequest_claims):

    # First, ensure there is an object row for each master
    null_id = sa.null().label('id')
    if migrate_engine.dialect.name == 'postgresql':
        # postgres needs NULL cast to an integer:
        null_id = sa.cast(null_id, sa.INTEGER)
    new_objects = sa.select([
        null_id,
        buildrequests.c.claimed_by_name.label("name"),
        sa.literal_column("'BuildMaster'").label("class_name"),
    ],
        whereclause=buildrequests.c.claimed_by_name != sa.null(),
        distinct=True)

    # this doesn't seem to work without str() -- verified in sqla 0.6.0 - 0.7.1
    migrate_engine.execute(
        str(sautils.InsertFromSelect(objects, new_objects)))

    # now make a buildrequest_claims row for each claimed build request
    join = buildrequests.join(objects,
                              (buildrequests.c.claimed_by_name == objects.c.name)
                              # (have to use sa.text because str, below, doesn't work
                              # with placeholders)
                              & (objects.c.class_name == sa.text("'BuildMaster'")))
    claims = sa.select([
        buildrequests.c.id.label('brid'),
        objects.c.id.label('objectid'),
        buildrequests.c.claimed_at,
    ], from_obj=[join],
        whereclause=buildrequests.c.claimed_by_name != sa.null())
    migrate_engine.execute(
        str(sautils.InsertFromSelect(buildrequest_claims, claims)))
Example #11
def get_scheduled_jobs_to_start(time, batch_size=None, session=None):
    query = b.model_query(models.ScheduledJob)

    execute_at_col = models.ScheduledJob.execute_at
    captured_at_col = models.ScheduledJob.captured_at

    # Filter by execution time accounting for a configured job pickup interval.
    query = query.filter(
        execute_at_col <
        time - datetime.timedelta(seconds=CONF.scheduler.pickup_job_after)
    )

    # Filter by captured time accounting for a configured captured job timeout.
    min_captured_at = (
        utils.utc_now_sec() -
        datetime.timedelta(seconds=CONF.scheduler.captured_job_timeout)
    )

    query = query.filter(
        sa.or_(
            captured_at_col == sa.null(),
            captured_at_col <= min_captured_at
        )
    )

    query = query.order_by(execute_at_col)
    query = query.limit(batch_size)

    return query.all()
Example #12
def upgrade(migrate_engine):
    meta = sa.MetaData(bind=migrate_engine)
    trait = sa.Table('trait', meta, autoload=True)
    event = sa.Table('event', meta, autoload=True)
    trait_type = sa.Table('trait_type', meta, autoload=True)
    for t_name, t_type, t_nullable, col_name, __ in tables:
        t_table = sa.Table(
            t_name, meta,
            sa.Column('event_id', sa.Integer,
                      sa.ForeignKey(event.c.id), primary_key=True),
            sa.Column('key', sa.String(255), primary_key=True),
            sa.Column('value', t_type, nullable=t_nullable),
            sa.Index('ix_%s_event_id_key' % t_name,
                     'event_id', 'key'),
            mysql_engine='InnoDB',
            mysql_charset='utf8',
        )
        t_table.create()
        query = sa.select(
            [trait.c.event_id,
             trait_type.c.desc,
             trait.c[col_name]]).select_from(
                 trait.join(trait_type,
                            trait.c.trait_type_id == trait_type.c.id)).where(
                                trait.c[col_name] != sa.null())
        if query.alias().select().scalar() is not None:
            t_table.insert().from_select(
                ['event_id', 'key', 'value'], query).execute()
    trait.drop()
    trait_type.drop()
Example #13
    def has_property(self, prop):
        property_granted_select = select(
            [null()],
            from_obj=[
                Property.__table__,
                PropertyGroup.__table__,
                Membership.__table__
            ]
        ).where(
            and_(
                Property.name == prop,
                Property.property_group_id == PropertyGroup.id,
                PropertyGroup.id == Membership.group_id,
                Membership.user_id == self.id,
                Membership.active
            )
        )
        #.cte("property_granted_select")
        return and_(
            not_(exists(
                property_granted_select.where(
                    Property.granted == false())

            )),
            exists(
                property_granted_select.where(
                    Property.granted == true()
                )
            )
        )
Example #14
    def where(self, clauses):
        '''
        Build an SQL WHERE clause.
        Each clause consists of a field name, an operator and a value.
        clauses: an SQL condition expression of the form
                 ["and"/"or", [(field_name, operator, value), ...]]
        '''
        if not clauses:
            return None
        if clauses[0] in _sql_rl:
            rl = _sql_rl[clauses[0]]
        else:
            raise Exception("sql where clause list must start with 'and' or 'or'")
        wl = []
        for c in clauses[1]:
            c = list(c)
            if c[0] in _sql_rl:
                wl.append(self.where(c))
            elif isinstance(c, _BinaryExpression):
                wl.append(c)
            elif len(c) == 3 and c[1] in _exp_where_op:
                if c[2] == 'null':
                    c[2] = null()
                wl.append(self.clause(c))
            else:
                raise Exception("sql clause error, %s" % (clauses,))
        return rl(*wl)
Example #15
    def has_property(cls, prop, when=None):
        # TODO Use joins
        property_granted_select = select(
            [null()],
            from_obj=[
                Property.__table__,
                PropertyGroup.__table__,
                Membership.__table__
            ]
        ).where(
            and_(
                Property.name == prop,
                Property.property_group_id == PropertyGroup.id,
                PropertyGroup.id == Membership.group_id,
                Membership.user_id == cls.id,
                Membership.active(when)
            )
        )
        #.cte("property_granted_select")
        return and_(
            not_(exists(
                property_granted_select.where(
                    Property.granted == false())

            )),
            exists(
                property_granted_select.where(
                    Property.granted == true()
                )
            )
        ).self_group().label("has_property_" + prop)
Example #16
def upgrade():
    """Remove question and answer for startDate from briefs.data for draft dos2 briefs."""
    conn = op.get_bind()

    # SELECT id, data
    # FROM briefs JOIN frameworks ON briefs.framework_id = frameworks.id
    # WHERE frameworks.slug = 'digital-outcomes-and-specialists-2' AND briefs.published_at IS null;
    query = briefs_table.join(
        frameworks_table,
        briefs_table.c.framework_id == frameworks_table.c.id
    ).select(
        sa.and_(
            frameworks_table.c.slug == 'digital-outcomes-and-specialists-2',
            briefs_table.c.published_at == sa.null()
        )
    ).with_only_columns(
        (
            briefs_table.c.id,
            briefs_table.c.data
        )
    )
    results = conn.execute(query).fetchall()

    for brief_id, brief_data in results:
        if brief_data.pop('startDate', None) is not None:
            # UPDATE briefs SET data = _brief_data WHERE id = _brief_id;
            query = briefs_table.update().where(briefs_table.c.id==brief_id).values(data=brief_data)
            conn.execute(query)
Example #17
def migrate_claims(migrate_engine, metadata, buildrequests, objects, buildrequest_claims):

    # First, ensure there is an object row for each master
    new_objects = sa.select(
        [
            sa.null().label("id"),
            buildrequests.c.claimed_by_name.label("name"),
            sa.literal_column("'BuildMaster'").label("class_name"),
        ],
        whereclause=buildrequests.c.claimed_by_name != None,
        distinct=True,
    )

    # this doesn't seem to work without str() -- verified in sqla 0.6.0 - 0.7.1
    migrate_engine.execute(str(sautils.InsertFromSelect(objects, new_objects)))

    # now make a buildrequest_claims row for each claimed build request
    join = buildrequests.join(
        objects,
        (buildrequests.c.claimed_by_name == objects.c.name)
        # (have to use sa.text because str, below, doesn't work
        # with placeholders)
        & (objects.c.class_name == sa.text("'BuildMaster'")),
    )
    claims = sa.select(
        [buildrequests.c.id.label("brid"), objects.c.id.label("objectid"), buildrequests.c.claimed_at],
        from_obj=[join],
        whereclause=buildrequests.c.claimed_by_name != None,
    )
    migrate_engine.execute(str(sautils.InsertFromSelect(buildrequest_claims, claims)))
Example #18
    def _send_password_resets(self):
        db = request.environ['sqlalchemy.session']
        model = request.environ['sqlalchemy.model']
        output = StringIO()

        resets = db.query(model.PasswordResetRequest).filter_by(sent_on=null()).all()

        for reset in resets:
            try:
                c.password_reset_link = h.site_url() + h.url_for(controller='password', action='reset', token=reset.token)
                c.password_reset_code = reset.token
                body = render_jinja('messages/password_reset.jinja')
                message = MIMEText(body.encode('utf-8'), 'plain', 'utf-8')
                message['Subject'] = _("Password reset instruction from %s") % h.site_name()
                message['To'] = reset.user.email
                message['From'] = config['from_address']
                
                ms = EmailSender(to_addresses = message['To'])
                ms.send_mime(message)

                reset.sent_on = datetime.utcnow()
                db.commit()

                output.write(_("Sending password reset e-mail to %s\n") % message['To'])
            except Exception:
                output.write(_("Can't send password reset e-mail: %s\n") % format_exc())
        return output.getvalue()
Example #19
    def _send_password_resets(self):
        db = request.environ['sqlalchemy.session']
        model = request.environ['sqlalchemy.model']

        resets = db.query(model.PasswordResetRequest).filter_by(sent_on=null()).all()
        from email.MIMEText import MIMEText
        from fivecents.lib.mail import EmailSender

        for reset in resets:
            try:
                c.password_reset_link = h.site_url() + h.url_for(controller='password', action='reset', token=reset.token)
                c.password_reset_code = reset.token
                body = render_jinja('messages/password_reset.jinja')
                message = MIMEText(body.encode('utf-8'), 'plain', 'utf-8')
                message['Subject'] = _("Password reset instruction from %s") % h.site_name()
                message['To'] = reset.user.email
                message['From'] = config['from_address']
                
                log.info(_("Sending password reset e-mail to %s") % message['To'])
                ms = EmailSender(to_addresses = message['To'])
                ms.send_mime(message)

                reset.sent_on = datetime.utcnow()
                db.commit()
            except Exception:
                log.error("Can't send password reset e-mail", exc_info=1)
Example #20
def _get_choices(db_session, schema_name, attribute_name):
    """
    Returns the most recent version of every choice ever used for the attribute
    """

    recent_choices_query = (
        db_session.query(datastore.Choice)
        .join(datastore.Choice.attribute)
        .join(datastore.Attribute.schema)
        .add_column(
            sa.func.row_number().over(
                partition_by=datastore.Choice.name,
                order_by=datastore.Schema.publish_date.desc().nullslast()
            ).label('row_number')
        )
        .filter(
            (datastore.Schema.name == schema_name) &
            (datastore.Attribute.name == attribute_name) &
            (datastore.Schema.publish_date != sa.null())
        )
        .subquery()
    )

    query = (
        db_session.query(datastore.Choice)
        .select_entity_from(recent_choices_query)
        .filter(recent_choices_query.c.row_number == 1)
    )

    choices = query.all()

    return choices
Example #21
    def _case(self):
        pairs = set(self.cls.type_map.values())
        whens = [
            (literal_column("'%s'" % discriminator), cast(getattr(self.cls, attribute), String))
            for attribute, discriminator in pairs
            if attribute is not None
        ]
        return case(whens, self.cls.type, null())
Example #22
    def active(cls, when=None):
        """
        Tests if memberships overlap with a given interval. If no interval is
        given, it tests if the memberships are active right now.
        :param Interval when:
        :return:
        """
        if when is None:
            now = session.utcnow()
            when = single(now)

        return and_(
            or_(cls.begins_at == null(), literal(when.end) == null(),
                cls.begins_at <= literal(when.end)),
            or_(literal(when.begin) == null(), cls.ends_at == null(),
                literal(when.begin) <= cls.ends_at)
        ).label("active")
Example #23
    def test_round_trip_none_as_sql_null(self):
        col = self.tables.data_table.c["nulldata"]

        with config.db.connect() as conn:
            conn.execute(self.tables.data_table.insert(), {"name": "r1", "data": None})

            eq_(conn.scalar(select([self.tables.data_table.c.name]).where(col.is_(null()))), "r1")

            eq_(conn.scalar(select([col])), None)
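A related subtlety the test leans on: for JSON columns, a Python None may be persisted either as SQL NULL or as the JSON 'null' value depending on the type's none_as_null flag, while passing sa.null() explicitly always yields SQL NULL. Sketch:

import sqlalchemy as sa

data_table = sa.Table(
    't', sa.MetaData(),
    sa.Column('name', sa.String),
    sa.Column('nulldata', sa.JSON(none_as_null=True)),
)
# With none_as_null=True, both of these store SQL NULL:
stmt1 = data_table.insert().values(name='r1', nulldata=None)
stmt2 = data_table.insert().values(name='r2', nulldata=sa.null())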
Example #24
def available_schemata(context, request):
    """
    Returns a list of available schemata for the given context

    Criteria for available schemata:
        * Must be configured for a study (i.e. NOT patient/enrollment forms)
        * Must NOT be retracted

    GET parameters:
        schema -- (optional) only shows results for specific schema name
                  (useful for searching for a schema's publish dates)
        term -- (optional) filters by schema title or publish date
        grouped -- (optional) groups all results by schema name

    """
    db_session = request.db_session

    class SearchForm(Form):
        term = wtforms.StringField()
        schema = wtforms.StringField()
        grouped = wtforms.BooleanField()

    form = SearchForm(request.GET)
    form.validate()

    query = (
        db_session.query(datastore.Schema)
        # only allow forms that are available to active studies
        .join(models.study_schema_table)
        .join(models.Study)
        .filter(datastore.Schema.retract_date == sa.null()))

    if form.schema.data:
        query = query.filter(datastore.Schema.name == form.schema.data)

    if form.term.data:
        wildcard = u'%' + form.term.data + u'%'
        query = query.filter(
            datastore.Schema.title.ilike(wildcard)
            | sa.cast(datastore.Schema.publish_date,
                      sa.Unicode).ilike(wildcard))

    query = (
        query.order_by(
            datastore.Schema.title,
            datastore.Schema.publish_date.asc())
        .limit(100))

    if form.grouped.data:
        schemata = form2json(query)
    else:
        schemata = [version2json(i) for i in query]

    return {
        '__query__': form.data,
        'schemata': schemata
    }
Example #25
def get_expired_executions(time, session=None):
    query = b.model_query(models.WorkflowExecution)

    # Only WorkflowExecution that are not a child of other WorkflowExecution.
    query = query.filter(models.WorkflowExecution.task_execution_id == sa.null())
    query = query.filter(models.WorkflowExecution.updated_at < time)
    query = query.filter(sa.or_(models.WorkflowExecution.state == "SUCCESS", models.WorkflowExecution.state == "ERROR"))

    return query.all()
Example #26
    def delete_null_duplicates(tablename, columns, notnull, returning=sa.text('*')):
        assert columns
        table = sa.table(tablename, *map(sa.column, ['pk'] + columns))
        any_null = sa.or_(table.c[n] == sa.null() for n in notnull)
        yield table.delete(bind=conn).where(any_null).returning(returning)
        other = table.alias()
        yield table.delete(bind=conn).where(~any_null).returning(returning)\
            .where(sa.exists()
                   .where(sa.and_(table.c[c] == other.c[c] for c in columns))
                   .where(table.c.pk > other.c.pk))
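One portability note: passing a bare generator to sa.or_() / sa.and_() as done above worked in older SQLAlchemy releases but is deprecated in newer ones; unpacking an explicit list is the stable spelling. Self-contained sketch:

import sqlalchemy as sa

table = sa.table('t', sa.column('pk'), sa.column('a'), sa.column('b'))
notnull = ['a', 'b']

any_null = sa.or_(*[table.c[n] == sa.null() for n in notnull])
print(any_null)  # t.a IS NULL OR t.b IS NULL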
Example #27
    def markall(self):
        # TODO: This could be done in a much smarter way! TODO: Check out subqueries in sqlalchemy
        notamupdates = \
            list(meta.Session.query(NotamUpdate).filter(
                NotamUpdate.disappearnotam == sa.null()).order_by(sa.desc(NotamUpdate.appearnotam), sa.asc(NotamUpdate.appearline)).all())

        acks = set([(ack.appearnotam, ack.appearline) for ack in meta.Session.query(NotamAck).filter(sa.and_(
            NotamAck.user == session['user'],
            NotamUpdate.disappearnotam == sa.null(),
            NotamAck.appearnotam == NotamUpdate.appearnotam,
            NotamAck.appearline == NotamUpdate.appearline)).all()])
        for u in notamupdates:
            if (u.appearnotam, u.appearline) not in acks:
                print("Acking ", (u.appearnotam, u.appearline, u.text))
                ack = NotamAck(session['user'], u.appearnotam, u.appearline)
                meta.Session.add(ack)
        meta.Session.flush()
        meta.Session.commit()
        return redirect(h.url_for(controller='notam', action="index"))
Example #28
File: util.py  Project: clld/apics
def dataset_detail_html(context=None, request=None, **kw):
    return {
        'stats': context.get_stats(
            [rsc for rsc in RESOURCES if rsc.name
             in 'language contributor parameter sentence'.split()],
            language=Lect.language_pk == null(),
            parameter=and_(Feature.feature_type == 'primary', Parameter.id != '0'),
            contributor=Contributor.contribution_assocs.any()),
        'example_contribution': Contribution.get('58'),
        'citation': get_adapter(IRepresentation, context, request, ext='md.txt')}
Example #29
def get_list_data(request, names=None):
    db_session = request.db_session
    InnerSchema = orm.aliased(datastore.Schema)
    InnerAttribute = orm.aliased(datastore.Attribute)
    query = (
        db_session.query(datastore.Schema.name)
        .add_column(
            db_session.query(
                db_session.query(InnerAttribute)
                .join(InnerSchema, InnerAttribute.schema)
                .filter(InnerSchema.name == datastore.Schema.name)
                .filter(InnerAttribute.is_private)
                .correlate(datastore.Schema)
                .exists())
            .as_scalar()
            .label('has_private'))
        .add_column(
            db_session.query(InnerSchema.title)
            .filter(InnerSchema.name == datastore.Schema.name)
            .order_by(
                InnerSchema.publish_date == sa.null(),
                InnerSchema.publish_date.desc())
            .limit(1)
            .correlate(datastore.Schema)
            .as_scalar()
            .label('title'))
        .group_by(datastore.Schema.name)
        .order_by(datastore.Schema.name))

    if names:
        query = query.filter(datastore.Schema.name.in_(names))

    def jsonify(row):
        values = row._asdict()
        versions = (
            db_session.query(datastore.Schema)
            .filter(datastore.Schema.name == row.name)
            .order_by(datastore.Schema.publish_date == sa.null(),
                      datastore.Schema.publish_date.desc()))
        values['versions'] = [{
            '__url__':  request.route_path(
                'forms.version',
                form=row.name,
                version=version.publish_date or version.id),
            'id': version.id,
            'name': version.name,
            'title': version.title,
            'publish_date': version.publish_date and str(version.publish_date),
            'retract_date': version.retract_date and str(version.retract_date)
        } for version in versions]
        return values

    return {
        'forms': [jsonify(r) for r in query]
    }
Example #30
    def build_initial_query(self, trans, **kwd):
        return trans.sa_session.query(model.Repository) \
            .filter(and_(model.Repository.table.c.deleted == false(),
                         model.Repository.table.c.deprecated == false(),
                         model.Repository.reviews == null())) \
            .join(model.RepositoryMetadata.table) \
            .filter(and_(model.RepositoryMetadata.table.c.downloadable == true(),
                         or_(model.RepositoryMetadata.table.c.includes_tools == false(),
                             and_(model.RepositoryMetadata.table.c.includes_tools == true(),
                                  model.RepositoryMetadata.table.c.tools_functionally_correct == true())))) \
            .join(model.User.table)
Example #31
    def get_events(self, event_filter, pagination=None):
        """Return an iterable of model.Event objects.

        :param event_filter: EventFilter instance
        :param pagination: Pagination parameters.
        """
        pagination = pagination or {}
        session = self._engine_facade.get_session()
        with session.begin():
            # Build up the join conditions
            event_join_conditions = [
                models.EventType.id == models.Event.event_type_id
            ]

            if event_filter.event_type:
                event_join_conditions.append(
                    models.EventType.desc == event_filter.event_type)

            # Build up the where conditions
            event_filter_conditions = []
            if event_filter.message_id:
                event_filter_conditions.append(
                    models.Event.message_id == event_filter.message_id)
            if event_filter.start_timestamp:
                event_filter_conditions.append(
                    models.Event.generated >= event_filter.start_timestamp)
            if event_filter.end_timestamp:
                event_filter_conditions.append(
                    models.Event.generated <= event_filter.end_timestamp)

            trait_subq = None
            # Build trait filter
            if event_filter.traits_filter:
                filters = list(event_filter.traits_filter)
                trait_filter = filters.pop()
                key = trait_filter.pop('key')
                op = trait_filter.pop('op', 'eq')
                trait_type, value = list(trait_filter.items())[0]
                trait_subq = _build_trait_query(session, trait_type, key,
                                                value, op)
                for trait_filter in filters:
                    key = trait_filter.pop('key')
                    op = trait_filter.pop('op', 'eq')
                    trait_type, value = list(trait_filter.items())[0]
                    q = _build_trait_query(session, trait_type, key, value, op)
                    trait_subq = trait_subq.filter(
                        trait_subq.subquery().c.ev_id == q.subquery().c.ev_id)
                trait_subq = trait_subq.subquery()

            query = (session.query(models.Event.id).join(
                models.EventType, sa.and_(*event_join_conditions)))
            if trait_subq is not None:
                query = query.join(trait_subq,
                                   trait_subq.c.ev_id == models.Event.id)
            if event_filter.admin_proj:
                no_proj_q = session.query(models.TraitText.event_id).filter(
                    models.TraitText.key == 'project_id')
                admin_q = (session.query(
                    models.TraitText.event_id).filter(~sa.exists().where(
                        models.TraitText.event_id ==
                        no_proj_q.subquery().c.event_id)).union(
                            session.query(models.TraitText.event_id).filter(
                                sa.and_(
                                    models.TraitText.key == 'project_id',
                                    models.TraitText.value ==
                                    event_filter.admin_proj, models.Event.id ==
                                    models.TraitText.event_id))))
                query = query.filter(sa.exists().where(
                    models.Event.id ==
                    admin_q.subquery().c.trait_text_event_id))
            if event_filter_conditions:
                query = query.filter(sa.and_(*event_filter_conditions))

            query = self._get_pagination_query(query, pagination,
                                               api_models.Event, models.Event)

            event_list = collections.OrderedDict()
            # get a list of all events that match filters
            for (id_, generated, message_id, desc, raw) in query.add_columns(
                    models.Event.generated, models.Event.message_id,
                    models.EventType.desc, models.Event.raw).all():
                event_list[id_] = api_models.Event(message_id, desc, generated,
                                                   [], raw)

            # Query all traits related to events.
            # NOTE (gordc): cast is done because pgsql defaults to TEXT when
            #               handling unknown values such as null.
            trait_q = (session.query(
                models.TraitDatetime.event_id, models.TraitDatetime.key,
                models.TraitDatetime.value, sa.cast(sa.null(), sa.Integer),
                sa.cast(sa.null(), sa.Float(53)),
                sa.cast(sa.null(), sa.String(255))).filter(sa.exists().where(
                    models.TraitDatetime.event_id == query.subquery().c.id))
                       ).union_all(
                           session.query(models.TraitInt.event_id,
                                         models.TraitInt.key, sa.null(),
                                         models.TraitInt.value, sa.null(),
                                         sa.null()).filter(sa.exists().where(
                                             models.TraitInt.event_id ==
                                             query.subquery().c.id)),
                           session.query(models.TraitFloat.event_id,
                                         models.TraitFloat.key, sa.null(),
                                         sa.null(), models.TraitFloat.value,
                                         sa.null()).filter(sa.exists().where(
                                             models.TraitFloat.event_id ==
                                             query.subquery().c.id)),
                           session.query(models.TraitText.event_id,
                                         models.TraitText.key, sa.null(),
                                         sa.null(), sa.null(),
                                         models.TraitText.value).filter(
                                             sa.exists().where(
                                                 models.TraitText.event_id ==
                                                 query.subquery().c.id)))

            for id_, key, t_date, t_int, t_float, t_text in (trait_q.order_by(
                    models.TraitDatetime.key)).all():
                if t_int is not None:
                    dtype = api_models.Trait.INT_TYPE
                    val = t_int
                elif t_float is not None:
                    dtype = api_models.Trait.FLOAT_TYPE
                    val = t_float
                elif t_date is not None:
                    dtype = api_models.Trait.DATETIME_TYPE
                    val = t_date
                else:
                    dtype = api_models.Trait.TEXT_TYPE
                    val = t_text

                try:
                    trait_model = api_models.Trait(key, dtype, val)
                    event_list[id_].append_trait(trait_model)
                except KeyError:
                    # NOTE(gordc): this is expected as we do not set REPEATABLE
                    # READ (bug 1506717). if query is run while recording new
                    # event data, trait query may return more data than event
                    # query. they can be safely discarded.
                    pass

            return event_list.values()
Example #32
def delete_userless_histories( app, cutoff_time, info_only=False, force_retry=False ):
    # Deletes userless histories whose update_time value is older than the cutoff_time.
    # The purge history script will handle marking DatasetInstances as deleted.
    # Nothing is removed from disk yet.
    history_count = 0
    start = time.time()
    if force_retry:
        histories = app.sa_session.query( app.model.History ) \
                                  .filter( and_( app.model.History.table.c.user_id == null(),
                                                 app.model.History.table.c.update_time < cutoff_time ) )
    else:
        histories = app.sa_session.query( app.model.History ) \
                                  .filter( and_( app.model.History.table.c.user_id == null(),
                                                 app.model.History.table.c.deleted == false(),
                                                 app.model.History.table.c.update_time < cutoff_time ) )
    for history in histories:
        if not info_only:
            print "Deleting history id ", history.id
            history.deleted = True
            app.sa_session.add( history )
            app.sa_session.flush()
        history_count += 1
    stop = time.time()
    print "Deleted %d histories" % history_count
    print "Elapsed time: ", stop - start
    print "##########################################"
Example #33
    # Port mappings
    sa.Column('repl_in_port', sa.Integer(), nullable=False),
    sa.Column('repl_out_port', sa.Integer(), nullable=False),
    sa.Column('stdin_port', sa.Integer(), nullable=False),
    sa.Column('stdout_port', sa.Integer(), nullable=False),

    # Lifecycle
    sa.Column('created_at',
              sa.DateTime(timezone=True),
              server_default=sa.func.now(),
              index=True),
    sa.Column('terminated_at',
              sa.DateTime(timezone=True),
              nullable=True,
              default=sa.null(),
              index=True),
    sa.Column('status',
              EnumType(KernelStatus),
              default=KernelStatus.PREPARING,
              index=True),
    sa.Column('status_info', sa.Unicode(), nullable=True, default=sa.null()),

    # Live stats
    sa.Column('num_queries', sa.BigInteger(), default=0),
    sa.Column('cpu_used', sa.BigInteger(), default=0),  # msec
    sa.Column('mem_max_bytes', sa.BigInteger(), default=0),
    sa.Column('net_rx_bytes', sa.BigInteger(), default=0),
    sa.Column('net_tx_bytes', sa.BigInteger(), default=0),
    sa.Column('io_read_bytes', sa.BigInteger(), default=0),
    sa.Column('io_write_bytes', sa.BigInteger(), default=0),
Example #34
class UserMixin(object):
    """Generic mixin for user entities."""
    # These two attributes are needed by Flask-Login.
    is_anonymous = False
    is_authenticated = True

    is_enabled = sa.Column(sa.Boolean, nullable=False, default=True)
    is_superuser = sa.Column(sa.Boolean, nullable=False, default=False)
    password = sa.Column(KAPasswordType(onload=_create_cryptcontext_kwargs))

    username = sa.Column(sa.Unicode(512), nullable=False, unique=True)

    # key used to identify the "id" for flask-login, which is expected to be a string. While we
    #   could return the user's db id cast to string, that would not give us a hook to reset
    #   sessions when permissions go stale
    session_key = sa.Column(sa.Unicode(36),
                            nullable=False,
                            unique=True,
                            default=_generate_session_key)

    # When a user logs in we need to track their last login time
    # This is used in the salt to invalidate a password/verification token
    # when a user logs in.
    last_login_utc = sa.Column(ArrowType,
                               nullable=True,
                               default=None,
                               server_default=None)

    # The datetime when a user will be disabled. User will be inactive if this is set
    # to a datetime in the past.
    disabled_utc = sa.Column(ArrowType,
                             nullable=True,
                             default=None,
                             server_default=sa.null())

    # is_active defines the complexities of how to determine what users are active. For instance,
    #   if email is in scope, we need to have an additional flag to verify users, and that would
    #   get included in is_active logic.
    @hybrid_property
    def is_active(self):
        return not self.is_disabled_by_date and self.is_enabled

    @is_active.expression
    def is_active(cls):
        expr = sa_sql.and_(~cls.is_disabled_by_date,
                           cls.is_enabled == sa.true())
        # need to wrap the expression in a case to work with MSSQL
        return sa_sql.case([(expr, sa.true())], else_=sa.false())

    @hybrid_property
    def is_disabled_by_date(self):
        return self.disabled_utc is not None and self.disabled_utc <= arrow.utcnow()

    @is_disabled_by_date.expression
    def is_disabled_by_date(cls):
        is_disabled_expr = sa.sql.and_(
            cls.disabled_utc.isnot(None),
            cls.disabled_utc <= arrow.utcnow(),
        )
        return sa_sql.case([(is_disabled_expr, sa.true())], else_=sa.false())

    def get_id(self):
        # Flask-Login requires that this return a string value for the session
        return str(self.session_key)

    def reset_session_key(self):
        self.session_key = _generate_session_key()

    @property
    def display_value(self):
        # shortcut to return the value of the user ident attribute
        return self.username

    @classmethod
    def testing_create(cls, **kwargs):
        kwargs['password'] = kwargs.get('password') or randchars()

        if 'permissions' in kwargs:
            perm_cls = registry().permission_cls

            # ensure all of the tokens exist
            flask.current_app.auth_manager.validate_permission_set(
                list(
                    filter(lambda perm: not isinstance(perm, perm_cls),
                           tolist(kwargs['permissions']))))

            kwargs['permissions'] = [
                perm_cls.testing_create(
                    token=perm) if not isinstance(perm, perm_cls) else perm
                for perm in tolist(kwargs['permissions'])
            ]

        user = super(UserMixin, cls).testing_create(**kwargs)
        user._plaintext_pass = kwargs['password']
        return user

    def get_all_permissions(self):
        # Superusers are considered to have all permissions.
        if self.is_superuser:
            return set(registry().permission_cls.query)

        perm_cls = registry().permission_cls
        mapping = self._query_permission_mapping().alias(
            'user_permission_mapping')
        q = db.session.query(perm_cls).select_from(perm_cls).join(
            mapping, mapping.c.perm_id == perm_cls.id).filter(
                mapping.c.user_id == self.id)
        return set(q)

    def get_all_permission_tokens(self):
        # permission tokens for a given user should get loaded once per session. This method, called
        #   by has_all_permissions, is the main interface to grab them. So, set up a cache here, so
        #   the user instance stored by flask-login will hold the set to be used (rather than
        #   continuing to query the database on each permission check)
        # the other side to this is that permissions can become stale, because we are not querying
        #   the database every time. If an admin changes permissions while a user is actively
        #   logged in, we have to make sure the session is invalidated (see session_key field)
        if not hasattr(self, '_permission_cache'):
            self._permission_cache = {
                p.token
                for p in self.get_all_permissions()
            }
        return self._permission_cache

    def has_all_permissions(self, *tokens):
        return set(tokens).issubset(self.get_all_permission_tokens())

    def has_any_permission(self, *tokens):
        return bool(set(tokens).intersection(self.get_all_permission_tokens()))

    @classmethod
    def _query_permission_mapping(cls):
        return sa.union(cls._query_direct_permissions(),
                        cls._query_bundle_permissions(),
                        cls._query_group_permissions())

    @classmethod
    def _query_direct_permissions(cls):
        perm_cls = registry().permission_cls
        return db.session.query(
            cls.id.label('user_id'),
            perm_cls.id.label('perm_id'),
        ).select_from(cls).join(cls.permissions)

    @classmethod
    def _query_bundle_permissions(cls):
        perm_cls = registry().permission_cls
        bundle_cls = registry().bundle_cls
        return db.session.query(
            cls.id.label('user_id'),
            perm_cls.id.label('perm_id'),
        ).select_from(cls).join(cls.bundles).join(bundle_cls.permissions)

    @classmethod
    def _query_group_permissions(cls):
        group_cls = registry().group_cls

        group_mapping = group_cls._query_permission_mapping().alias(
            'group_permissions_mapping')
        return db.session.query(
            cls.id.label('user_id'),
            group_mapping.c.perm_id.label('perm_id')).select_from(cls).join(
                cls.groups).join(group_mapping,
                                 group_mapping.c.group_id == group_cls.id)

    def get_token_salt(self):
        """
        Create salt data for password reset token signing. The return value will be hashed
        together with the signing key. This ensures that changes to any of the fields included
        in the salt invalidate any tokens produced with the old values.
        Values included:

            * user login identifier -> if username/email change it will invalidate
                                       the user token
            * is_active -> any time a user verifies, the token is invalidated

            * current password hash or empty string if no password has been set
              -> If the password is updated we want to invalidate the token

            * last login time -> Any time a user logs in it will invalidate any
                                 verification and reset password emails

        :return: JSON string of list containing the values listed above
        """
        return json.dumps([
            self.display_value,
            str(self.is_active),
            self.password.hash.decode() if self.password is not None else '',
            self.last_login_utc.to('UTC').isoformat()
            if self.last_login_utc else None
        ])

    def get_token_serializer(self, expires_in):
        """
        Create a JWT serializer using itsdangerous
        :param expires_in: seconds from token creation until expiration
        :return: TimedJSONWebSignatureSerializer instance
        """
        return itsdangerous.TimedJSONWebSignatureSerializer(
            flask.current_app.config['SECRET_KEY'],
            algorithm_name='HS512',
            expires_in=expires_in,
            signer_kwargs={'digest_method': hashlib.sha512})

    def token_verify(self, token):
        """
        Verify a password reset token. The token is validated for:
            * user identity
            * tampering
            * expiration
            * password was not already reset since token was generated
            * user has not signed in since token was generated

        :param token: string representation of token to verify
        :return: bool indicating token validity
        """
        if not token:
            return False
        if isinstance(token, str):
            token = token.encode()

        serializer = self.get_token_serializer(None)
        try:
            payload = serializer.loads(token, salt=self.get_token_salt())
        except itsdangerous.BadSignature:
            return False
        return payload['user_id'] == self.id

    def token_generate(self):
        """
        Create a new token for this user. The returned value is an expiring JWT
        signed with the application's crypto key. Externally this token should be treated as opaque.
        The value returned by this function must not be persisted.
        :return: a string representation of the generated token
        """
        serializer = self.get_token_serializer(
            flask.current_app.config['KEGAUTH_TOKEN_EXPIRE_MINS'] * 60)
        payload = {
            'user_id': self.id,
        }
        token = serializer.dumps(payload, salt=self.get_token_salt()).decode()

        # Store the plain text version on this instance for ease of use.  It will not get
        # persisted to the db, so no security concern.
        self._token_plain = token

        return token
Example #35
def available_species(term):
    '''Generate query for available species'''
    return db.session.query(distinct(Taxa.species), null()) \
             .filter(Taxa.species.ilike('{}%'.format(term))) \
             .order_by(Taxa.species)
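Here null() is selected as a literal second column so the query keeps a fixed two-column shape for its caller. A core-level sketch of the same idea (table name and label illustrative):

import sqlalchemy as sa

taxa = sa.table('taxa', sa.column('species'))
q = sa.select([sa.distinct(taxa.c.species), sa.null().label('extra')])
print(q)  # SELECT DISTINCT taxa.species, NULL AS extra FROM taxa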
Example #36
def _df2list(raw_data):
    if isinstance(raw_data, pd.DataFrame):
        return [record.to_dict() for _, record in raw_data.fillna(sa.null()).iterrows()]
    else:
        return raw_data
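The fillna(sa.null()) call replaces every pandas NaN/None with the SQL NULL construct before the records are handed to an insert, since NaN itself would not be rendered as NULL. A self-contained sketch (column names illustrative):

import pandas as pd
import sqlalchemy as sa

df = pd.DataFrame({'x': [1.0, float('nan')], 'y': ['a', None]})
records = [rec.to_dict() for _, rec in df.fillna(sa.null()).iterrows()]
# records[1]['x'] is now sa.null(), which executemany-style inserts render as NULL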
Example #37
def _not_null(t, expr):
    arg = t.translate(expr.op().args[0])
    return arg.isnot(sa.null())
Example #38
    def __init__(self):
        super(AlarmDefinitionsRepository, self).__init__()

        metadata = MetaData()
        self.a = models.create_a_model(metadata)
        self.aa = models.create_aa_model(metadata)
        self.ad = models.create_ad_model(metadata)
        self.am = models.create_am_model(metadata)
        self.nm = models.create_nm_model(metadata)
        self.md = models.create_md_model(metadata)
        self.mde = models.create_mde_model(metadata)
        self.mdd = models.create_mdd_model(metadata)
        self.sa = models.create_sa_model(metadata)
        self.sad = models.create_sad_model(metadata)
        self.sadd = models.create_sadd_model(metadata)
        a = self.a
        aa = self.aa
        ad = self.ad
        am = self.am
        nm = self.nm
        md = self.md
        sa = self.sa
        mdd = self.mdd
        mde = self.mde
        sad = self.sad
        sadd = self.sadd

        a_s = a.alias('a')
        ad_s = ad.alias('ad')
        self.ad_s = ad_s
        am_s = am.alias('am')
        nm_s = nm.alias('nm')
        md_s = md.alias('md')
        sa_s = sa.alias('sa')
        mdd_s = mdd.alias('mdd')
        mde_s = mde.alias('mde')
        sad_s = sad.alias('sad')
        sadd_s = sadd.alias('sadd')

        aaa_aa = aa.alias('aaa_aa')
        aaa = (select([
            aaa_aa.c.alarm_definition_id,
            models.group_concat([aaa_aa.c.action_id]).label('alarm_actions')
        ]).select_from(aaa_aa).where(
            aaa_aa.c.alarm_state == text("'ALARM'")).group_by(
                aaa_aa.c.alarm_definition_id).alias('aaa'))

        aao_aa = aa.alias('aao_aa')
        aao = (select([
            aao_aa.c.alarm_definition_id,
            models.group_concat([aao_aa.c.action_id]).label('ok_actions')
        ]).select_from(aao_aa).where(
            aao_aa.c.alarm_state == text("'OK'")).group_by(
                aao_aa.c.alarm_definition_id).alias('aao'))

        aau_aa = aa.alias('aau_aa')
        aau = (select([
            aau_aa.c.alarm_definition_id,
            models.group_concat([aau_aa.c.action_id
                                 ]).label('undetermined_actions')
        ]).select_from(aau_aa).where(
            aau_aa.c.alarm_state == text("'UNDETERMINED'")).group_by(
                aau_aa.c.alarm_definition_id).alias('aau'))

        self.base_query_from = (ad_s.outerjoin(
            aaa, aaa.c.alarm_definition_id == ad_s.c.id).outerjoin(
                aao, aao.c.alarm_definition_id == ad_s.c.id).outerjoin(
                    aau, aau.c.alarm_definition_id == ad_s.c.id))

        self.base_query = (select([
            ad_s.c.id, ad_s.c.name, ad_s.c.description, ad_s.c.expression,
            ad_s.c.match_by, ad_s.c.severity, ad_s.c.actions_enabled,
            aaa.c.alarm_actions, aao.c.ok_actions, aau.c.undetermined_actions
        ]))

        self.get_sub_alarms_query = (select([
            sa_s.c.id.label('sub_alarm_id'), sa_s.c.alarm_id, sa_s.c.expression
        ]).select_from(
            sa_s.join(a_s, a_s.c.id == sa_s.c.alarm_id).join(
                ad_s, ad_s.c.id == a_s.c.alarm_definition_id)).where(
                    ad_s.c.tenant_id == bindparam('b_tenant_id')).where(
                        ad_s.c.id == bindparam('b_id')).distinct())

        mdg = (select([
            md_s.c.dimension_set_id,
            models.group_concat([md_s.c.name + text("'='") + md_s.c.value
                                 ]).label('dimensions')
        ]).select_from(md_s).group_by(md_s.c.dimension_set_id).alias('mdg'))

        self.get_alarm_metrics_query = (select([
            a_s.c.id.label('alarm_id'), mde_s.c.name, mdg.c.dimensions
        ]).select_from(
            a_s.join(ad_s, ad_s.c.id == a_s.c.alarm_definition_id).join(
                am_s, am_s.c.alarm_id == a_s.c.id).join(
                    mdd_s,
                    mdd_s.c.id == am_s.c.metric_definition_dimensions_id).join(
                        mde_s,
                        mde_s.c.id == mdd_s.c.metric_definition_id).outerjoin(
                            mdg, mdg.c.dimension_set_id ==
                            mdd_s.c.metric_dimension_set_id)
        ).where(ad_s.c.tenant_id == bindparam('b_tenant_id')).where(
            ad_s.c.id == bindparam('b_id')).order_by(a_s.c.id).distinct())

        self.soft_delete_ad_query = (update(ad).where(
            ad.c.tenant_id == bindparam('b_tenant_id')).where(
                ad.c.id == bindparam('b_id')).where(
                    ad.c.deleted_at == null()).values(
                        deleted_at=datetime.datetime.utcnow()))

        self.delete_a_query = (delete(a).where(
            a.c.alarm_definition_id == bindparam('b_id')))

        columns_gc = [sadd_s.c.dimension_name + text("'='") + sadd_s.c.value]
        saddg = (select([
            sadd_s.c.sub_alarm_definition_id,
            models.group_concat(columns_gc).label('dimensions')
        ]).select_from(sadd_s).group_by(
            sadd_s.c.sub_alarm_definition_id).alias('saddg'))

        self.get_sub_alarm_definitions_query = (select([
            sad_s, saddg.c.dimensions
        ]).select_from(
            sad_s.outerjoin(
                saddg, saddg.c.sub_alarm_definition_id == sad_s.c.id)).where(
                    sad_s.c.alarm_definition_id == bindparam(
                        'b_alarm_definition_id')))

        self.create_alarm_definition_insert_ad_query = (insert(ad).values(
            id=bindparam('b_id'),
            tenant_id=bindparam('b_tenant_id'),
            name=bindparam('b_name'),
            description=bindparam('b_description'),
            expression=bindparam('b_expression'),
            severity=bindparam('b_severity'),
            match_by=bindparam('b_match_by'),
            actions_enabled=bindparam('b_actions_enabled'),
            created_at=bindparam('b_created_at'),
            updated_at=bindparam('b_updated_at')))

        self.create_alarm_definition_insert_sad_query = (insert(sad).values(
            id=bindparam('b_id'),
            alarm_definition_id=bindparam('b_alarm_definition_id'),
            function=bindparam('b_function'),
            metric_name=bindparam('b_metric_name'),
            operator=bindparam('b_operator'),
            threshold=bindparam('b_threshold'),
            period=bindparam('b_period'),
            periods=bindparam('b_periods'),
            is_deterministic=bindparam('b_is_deterministic'),
            created_at=bindparam('b_created_at'),
            updated_at=bindparam('b_updated_at')))

        b_sad_id = bindparam('b_sub_alarm_definition_id')
        self.create_alarm_definition_insert_sadd_query = (insert(sadd).values(
            sub_alarm_definition_id=b_sad_id,
            dimension_name=bindparam('b_dimension_name'),
            value=bindparam('b_value')))

        self.update_or_patch_alarm_definition_update_ad_query = (
            update(ad).where(ad.c.tenant_id == bindparam('b_tenant_id')).where(
                ad.c.id == bindparam('b_id')))

        self.update_or_patch_alarm_definition_delete_sad_query = (
            delete(sad).where(sad.c.id == bindparam('b_id')))

        self.update_or_patch_alarm_definition_update_sad_query = (
            update(sad).where(sad.c.id == bindparam('b_id')).values(
                operator=bindparam('b_operator'),
                threshold=bindparam('b_threshold'),
                is_deterministic=bindparam('b_is_deterministic'),
                updated_at=bindparam('b_updated_at')))

        b_ad_id = bindparam('b_alarm_definition_id')
        self.update_or_patch_alarm_definition_insert_sad_query = (
            insert(sad).values(
                id=bindparam('b_id'),
                alarm_definition_id=b_ad_id,
                function=bindparam('b_function'),
                metric_name=bindparam('b_metric_name'),
                operator=bindparam('b_operator'),
                threshold=bindparam('b_threshold'),
                period=bindparam('b_period'),
                periods=bindparam('b_periods'),
                is_deterministic=bindparam('b_is_deterministic'),
                created_at=bindparam('b_created_at'),
                updated_at=bindparam('b_updated_at')))

        self.update_or_patch_alarm_definition_insert_sadd_query = (
            insert(sadd).values(sub_alarm_definition_id=b_sad_id,
                                dimension_name=bindparam('b_dimension_name'),
                                value=bindparam('b_value')))

        self.delete_aa_query = (delete(aa).where(
            aa.c.alarm_definition_id == bindparam('b_alarm_definition_id')))

        self.delete_aa_state_query = (delete(aa).where(
            aa.c.alarm_definition_id == bindparam('b_alarm_definition_id')
        ).where(aa.c.alarm_state == bindparam('b_alarm_state')))

        self.select_nm_query = (select([
            nm_s.c.id
        ]).select_from(nm_s).where(nm_s.c.id == bindparam('b_id')))

        self.insert_aa_query = (insert(aa).values(
            alarm_definition_id=bindparam('b_alarm_definition_id'),
            alarm_state=bindparam('b_alarm_state'),
            action_id=bindparam('b_action_id')))
Ejemplo n.º 39
0
def content(table, version, fin, user):
    sess = None
    f = None
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            table + '_' + version + '_general_import.csv', user)
        f = open(running_name, mode='w', newline='')
        w = csv.writer(f, lineterminator='\n')

        reader = iter(csv.reader(fin))
        next(reader)
        if table == 'Line_Loss_Factor_Class':
            VOLTAGE_LEVEL_CODES = set(
                [v.code for v in sess.query(VoltageLevel)])
            DNO_MAP = dict(
                (dno.participant.code, dno) for dno in sess.query(Party).
                join(MarketRole).filter(MarketRole.code == 'R').options(
                    joinedload(Party.participant)))
            for i, values in enumerate(reader):
                participant_code = values[0]
                # market_role_code = values[1]
                llfc_code = values[3].zfill(3)
                valid_from = parse_date(values[4])
                description = values[5]
                is_import = values[6] in ('A', 'B')
                is_substation = any(
                    p in description for p in (
                        '_SS', ' SS', ' S/S',  '(S/S)', 'sub', 'Sub'))

                valid_to = parse_to_date(values[7])
                voltage_level_code = 'LV'
                description_upper = description.upper()
                for vl_code in VOLTAGE_LEVEL_CODES:
                    if vl_code in description_upper:
                        voltage_level_code = vl_code
                        break

                try:
                    dno = DNO_MAP[participant_code]
                except KeyError:
                    w.writerow(
                        (
                            "# There is no DNO with participant code ",
                            participant_code))
                    continue

                llfc = sess.query(Llfc).filter(
                    Llfc.dno == dno, Llfc.code == llfc_code,
                    Llfc.valid_from == valid_from).first()
                if llfc is None:
                    w.writerow(
                        (
                            'insert', 'llfc', dno.dno_code, llfc_code,
                            description, voltage_level_code, is_substation,
                            is_import, hh_format(valid_from),
                            hh_format(valid_to, ongoing_str='')))
                elif any(
                        (
                            description != llfc.description,
                            voltage_level_code != llfc.voltage_level.code,
                            is_substation != llfc.is_substation,
                            is_import != llfc.is_import,
                            valid_to != llfc.valid_to)):
                    w.writerow(
                        (
                            'update', 'llfc', dno.dno_code, llfc.code,
                            hh_format(llfc.valid_from), description,
                            voltage_level_code, is_substation, is_import,
                            hh_format(valid_to, ongoing_str='')))

        elif table == 'Market_Participant':
            for i, values in enumerate(reader):
                participant_code = values[0]
                participant_name = values[1]

                participant = sess.query(Participant).filter(
                    Participant.code == participant_code).first()

                if participant is None:
                    w.writerow(
                        (
                            'insert', 'participant', participant_code,
                            participant_name))

                elif participant_name != participant.name:
                    w.writerow(
                        (
                            'update', 'participant', participant_code,
                            participant_name))

        elif table == 'Market_Role':
            for i, values in enumerate(reader):
                role_code = values[0]
                role_description = values[1]

                role = sess.query(MarketRole).filter(
                    MarketRole.code == role_code).first()

                if role is None:
                    w.writerow(
                        (
                            'insert', 'market_role', role_code,
                            role_description))

                elif role_description != role.description:
                    w.writerow(
                        (
                            'update', 'market_role', role_code,
                            role_description))

        elif table == 'Market_Participant_Role':
            for i, values in enumerate(reader):
                participant_code = values[0]
                market_role_code = values[1]
                valid_from = parse_date(values[2])
                party = sess.query(Party).join(Participant). \
                    join(MarketRole).filter(
                        Party.valid_from == valid_from,
                        Participant.code == participant_code,
                        MarketRole.code == market_role_code).first()
                valid_to = parse_to_date(values[3])
                name = values[4]
                dno_code_str = values[14]
                dno_code = None if len(dno_code_str) == 0 else dno_code_str
                if dno_code == '99':
                    continue

                if party is None:
                    w.writerow(
                        (
                            'insert', 'party', market_role_code,
                            participant_code, name, hh_format(valid_from),
                            hh_format(valid_to, ongoing_str=''),
                            dno_code_str))
                elif any(
                        (
                            name != party.name, dno_code != party.dno_code,
                            valid_to != party.valid_to)):
                    w.writerow(
                        (
                            'update', 'party', market_role_code,
                            participant_code, name, hh_format(valid_from),
                            hh_format(valid_to, ongoing_str=''),
                            dno_code_str))

        elif table == 'Meter_Timeswitch_Class':
            for i, values in enumerate(reader):
                code = values[0].zfill(3)
                valid_from = parse_date(values[1])
                valid_to = parse_to_date(values[2])
                description = values[3]
                is_common = values[4] == 'T'
                has_related_metering = values[5] == 'T'
                meter_type_code = values[6]
                meter_payment_type_code = values[7]
                has_comms = values[8] == 'T'
                is_hh = values[9] == 'H'
                tpr_count_str = values[10]
                tpr_count = 0 if tpr_count_str == '' else int(tpr_count_str)

                if is_common:
                    mtc = sess.query(Mtc).filter(
                        Mtc.dno == null(), Mtc.code == code,
                        Mtc.valid_from == valid_from).first()
                    if mtc is None:
                        w.writerow(
                            (
                                'insert', 'mtc', '', code, description,
                                has_related_metering, has_comms, is_hh,
                                meter_type_code, meter_payment_type_code,
                                tpr_count, hh_format(valid_from),
                                hh_format(valid_to, ongoing_str='')))
                    elif any(
                            (
                                description != mtc.description,
                                has_related_metering !=
                                mtc.has_related_metering,
                                has_comms != mtc.has_comms,
                                is_hh != mtc.is_hh,
                                meter_type_code != mtc.meter_type.code,
                                meter_payment_type_code !=
                                mtc.meter_payment_type.code,
                                tpr_count != mtc.tpr_count,
                                valid_to != mtc.valid_to)):
                        w.writerow(
                            (
                                'update', 'mtc', '', mtc.code, description,
                                has_related_metering, has_comms, is_hh,
                                meter_type_code, meter_payment_type_code,
                                tpr_count, hh_format(mtc.valid_from),
                                hh_format(valid_to, ongoing_str='')))

        elif table == 'MTC_in_PES_Area':
            dnos = dict(
                (p.participant.code, (p.id, p.dno_code)) for p in sess.query(
                    Party).join(Participant).join(MarketRole).filter(
                    MarketRole.code == 'R').options(
                    joinedload(Party.participant)))
            mtcs = dict(
                ((m.dno_id, m.code, m.valid_from), m)
                for m in sess.query(Mtc).options(
                    joinedload(Mtc.meter_type),
                    joinedload(Mtc.meter_payment_type)).all())
            for i, values in enumerate(reader):
                code_str = values[0]
                if not Mtc.has_dno(code_str):
                    continue

                code_int = int(code_str)
                code = code_str.zfill(3)
                participant_code = values[2]
                dno_id, dno_code = dnos[participant_code]
                valid_from = parse_date(values[3])
                valid_to = parse_to_date(values[4])
                description = values[5]
                meter_type_code = values[6]
                meter_payment_type_code = values[7]
                has_related_metering = code_int > 500
                has_comms = values[8] == 'Y'
                is_hh = values[9] == 'H'
                tpr_count_str = values[10]
                tpr_count = 0 if tpr_count_str == '' else int(tpr_count_str)

                mtc = mtcs.get((dno_id, code, valid_from))

                if mtc is None:
                    w.writerow(
                        (
                            'insert', 'mtc', dno_code, code, description,
                            has_related_metering, has_comms, is_hh,
                            meter_type_code, meter_payment_type_code,
                            tpr_count, hh_format(valid_from),
                            hh_format(valid_to, ongoing_str='')))
                elif any(
                        (
                            description != mtc.description,
                            has_related_metering != mtc.has_related_metering,
                            has_comms != mtc.has_comms,
                            is_hh != mtc.is_hh,
                            meter_type_code != mtc.meter_type.code,
                            meter_payment_type_code !=
                            mtc.meter_payment_type.code,
                            tpr_count != mtc.tpr_count,
                            valid_to != mtc.valid_to)):
                    w.writerow(
                        (
                            'update', 'mtc', mtc.dno.dno_code, mtc.code,
                            description, has_related_metering, has_comms,
                            is_hh, meter_type_code, meter_payment_type_code,
                            tpr_count, hh_format(mtc.valid_from),
                            hh_format(valid_to, ongoing_str='')))

        elif table == 'MTC_Meter_Type':
            for i, values in enumerate(reader):
                code = values[0]
                description = values[1]
                valid_from = parse_date(values[2])
                valid_to = parse_to_date(values[3])
                pt = sess.query(MeterType).filter(
                    MeterType.code == code,
                    MeterType.valid_from == valid_from).first()
                if pt is None:
                    w.writerow(
                        (
                            'insert', 'meter_type', code, description,
                            hh_format(valid_from),
                            hh_format(valid_to, ongoing_str='')))

                elif (description, valid_from, valid_to) != (
                        pt.description, pt.valid_from, pt.valid_to):
                    w.writerow(
                        (
                            'update', 'meter_type', code, description,
                            hh_format(valid_from), hh_format(valid_to)))
        else:
            raise Exception("The table " + table + " is not recognized.")
    except BaseException:
        # w only exists once the output file has been opened
        if f is not None:
            w.writerow([traceback.format_exc()])
    finally:
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
Ejemplo n.º 40
0
    def escape(self, value):
        """Escapes value using engine's conversion function.
        https://docs.sqlalchemy.org/en/latest/core/type_api.html#sqlalchemy.types.TypeEngine.literal_processor

        :param value: The value to be sanitized

        :returns: The sanitized value
        """
        # pylint: disable=too-many-return-statements
        if isinstance(value, (list, tuple)):
            return self.escape_iterable(value)

        if isinstance(value, bool):
            return sqlparse.sql.Token(
                sqlparse.tokens.Number,
                sqlalchemy.types.Boolean().literal_processor(
                    self._dialect)(value))

        if isinstance(value, bytes):
            if self._dialect.name in {"mysql", "sqlite"}:
                # https://dev.mysql.com/doc/refman/8.0/en/hexadecimal-literals.html
                return sqlparse.sql.Token(sqlparse.tokens.Other,
                                          f"x'{value.hex()}'")
            if self._dialect.name in {"postgres", "postgresql"}:
                # https://dba.stackexchange.com/a/203359
                return sqlparse.sql.Token(sqlparse.tokens.Other,
                                          f"'\\x{value.hex()}'")

            raise RuntimeError(f"unsupported value: {value}")

        string_processor = sqlalchemy.types.String().literal_processor(
            self._dialect)
        if isinstance(value, datetime.date):
            return sqlparse.sql.Token(
                sqlparse.tokens.String,
                string_processor(value.strftime("%Y-%m-%d")))

        if isinstance(value, datetime.datetime):
            return sqlparse.sql.Token(
                sqlparse.tokens.String,
                string_processor(value.strftime("%Y-%m-%d %H:%M:%S")))

        if isinstance(value, datetime.time):
            return sqlparse.sql.Token(
                sqlparse.tokens.String,
                string_processor(value.strftime("%H:%M:%S")))

        if isinstance(value, float):
            return sqlparse.sql.Token(
                sqlparse.tokens.Number,
                sqlalchemy.types.Float().literal_processor(
                    self._dialect)(value))

        if isinstance(value, int):
            return sqlparse.sql.Token(
                sqlparse.tokens.Number,
                sqlalchemy.types.Integer().literal_processor(
                    self._dialect)(value))

        if isinstance(value, str):
            return sqlparse.sql.Token(sqlparse.tokens.String,
                                      string_processor(value))

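        # A clause construct such as sqlalchemy.null() compiles to the SQL
        # keyword NULL when rendered as a string, which is presumably why it
        # is usable as the token value here.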
        if value is None:
            return sqlparse.sql.Token(sqlparse.tokens.Keyword,
                                      sqlalchemy.null())

        raise RuntimeError(f"unsupported value: {value}")
Ejemplo n.º 41
0
async def post_form_record(
        form_id: str = Path(..., regex="^[0-9a-fA-F]{32}$"),
        week: int = Path(..., ge=1, le=200),
        boss: int = Path(..., ge=1, le=5),
        record: schemas.PostRecord = ...,
        user_id: int = Depends(oauth.get_current_user_id),
        db: Session = Depends(get_db),
):
    """
    Add or update a record\n
    It will try to update exist record if request include an id.
    """
    formData = db.query(models.Form).filter(models.Form.id == form_id).first()
    if not formData:
        raise HTTPException(404, "Form Not Exist")
    if formData.status != 0:
        raise HTTPException(403, "Form Locked")

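    # null() forces a SQL NULL; a bare None on a JSON column may instead be
    # stored as a JSON null, depending on the type's none_as_null setting.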
    teamJson = jsonable_encoder(record.team) if record.team else null()
    if record.id:
        record_data = (db.query(
            models.Record).filter(models.Record.form_id == form_id).filter(
                models.Record.user_id == user_id).filter(
                    models.Record.id == record.id).filter(
                        models.Record.status != 99).first())
        if not record_data:
            raise HTTPException(404, "Record Not Exist")

        record_data.status = record.status.value
        record_data.damage = record.damage
        record_data.comment = record.comment
        record_data.team = teamJson
        record_data.last_modified = datetime.utcnow()
        db.commit()
        data = jsonable_encoder(schemas.AllRecord(**record_data.as_dict()))
        await sio.emit("FormTracker", {
            "type": "RecUP",
            "data": data
        },
                       room=form_id)
        return data
    else:
        record_data = models.Record(
            form_id=form_id,
            month=record.month if record.month else formData.month,
            week=week,
            boss=boss,
            status=record.status.value,
            damage=record.damage,
            comment=record.comment,
            user_id=user_id,
            team=teamJson,
        )
        db.add(record_data)
        db.commit()
    data = jsonable_encoder(schemas.AllRecord(**record_data.as_dict()))
    await sio.emit("FormTracker", {
        "type": "RecNEW",
        "data": data
    },
                   room=form_id)
    return data
Ejemplo n.º 42
0
    # Port mappings
    # If kernel_host is NULL, it is assumed to be same to the agent host or IP.
    sa.Column('kernel_host', sa.String(length=128), nullable=True),
    sa.Column('repl_in_port', sa.Integer(), nullable=False),
    sa.Column('repl_out_port', sa.Integer(), nullable=False),
    sa.Column('stdin_port', sa.Integer(), nullable=False),   # legacy for stream_pty
    sa.Column('stdout_port', sa.Integer(), nullable=False),  # legacy for stream_pty
    sa.Column('service_ports', pgsql.JSONB(), nullable=True),
    sa.Column('preopen_ports', sa.ARRAY(sa.Integer), nullable=True),

    # Lifecycle
    sa.Column('created_at', sa.DateTime(timezone=True),
              server_default=sa.func.now(), index=True),
    sa.Column('terminated_at', sa.DateTime(timezone=True),
              nullable=True, default=sa.null(), index=True),
    sa.Column('starts_at', sa.DateTime(timezone=True),
              nullable=True, default=sa.null()),
    sa.Column('status', EnumType(KernelStatus),
              default=KernelStatus.PENDING,
              server_default=KernelStatus.PENDING.name,
              nullable=False, index=True),
    sa.Column('status_changed', sa.DateTime(timezone=True), nullable=True, index=True),
    sa.Column('status_info', sa.Unicode(), nullable=True, default=sa.null()),
    sa.Column('startup_command', sa.Text, nullable=True),
    sa.Column('result', EnumType(SessionResult),
              default=SessionResult.UNDEFINED,
              server_default=SessionResult.UNDEFINED.name,
              nullable=False, index=True),
    sa.Column('internal_data', pgsql.JSONB(), nullable=True),
    sa.Column('container_log', sa.LargeBinary(), nullable=True),
Ejemplo n.º 43
0
    def get_alarm_definitions(self,
                              tenant_id,
                              name=None,
                              dimensions=None,
                              severity=None,
                              sort_by=None,
                              offset=None,
                              limit=1000):

        with self._db_engine.connect() as conn:
            ad = self.ad_s
            sad = self.sad.alias('sad')
            sadd = self.sadd.alias('sadd')
            query_from = self.base_query_from

            parms = {'b_tenant_id': tenant_id}

            if dimensions:
                sadi = sad.c.alarm_definition_id
                query_from = query_from.join(sad, sadi == ad.c.id)

                i = 0
                for n, v in dimensions.items():
                    bind_dimension_name = 'b_sadd_dimension_name_{}'.format(i)
                    bind_value = 'b_sadd_value_{}'.format(i)
                    sadd_ = (select([
                        sadd.c.sub_alarm_definition_id
                    ]).select_from(sadd).where(
                        sadd.c.dimension_name == bindparam(bind_dimension_name)
                    ).where(sadd.c.value == bindparam(
                        bind_value)).distinct().alias('sadd_{}'.format(i)))

                    sadd_id = sadd_.c.sub_alarm_definition_id
                    query_from = query_from.join(sadd_, sadd_id == sad.c.id)
                    parms[bind_dimension_name] = n.encode(
                        'utf8') if six.PY2 else n
                    parms[bind_value] = v.encode('utf8') if six.PY2 else v

                    i += 1

            query = (self.base_query.select_from(query_from).where(
                ad.c.tenant_id == bindparam('b_tenant_id')).where(
                    ad.c.deleted_at == null()))

            if name:
                query = query.where(ad.c.name == bindparam('b_name'))
                parms['b_name'] = name.encode('utf-8') if six.PY2 else name

            if severity:
                severities = severity.split('|')
                query = query.where(
                    or_(*[ad.c.severity == bindparam('b_severity' + str(i))
                          for i in range(len(severities))]))
                for i, s in enumerate(severities):
                    parms['b_severity' +
                          str(i)] = s.encode('utf-8') if six.PY2 else s

            order_columns = []
            if sort_by is not None:
                order_columns = [
                    literal_column('ad.' + col) for col in sort_by
                ]
                if 'id' not in sort_by:
                    order_columns.append(ad.c.id)
            else:
                order_columns = [ad.c.id]

            if offset:
                query = query.offset(bindparam('b_offset'))
                parms['b_offset'] = offset

            query = query.order_by(*order_columns)

            query = query.limit(bindparam('b_limit'))

            parms['b_limit'] = limit + 1

            return [dict(row) for row in conn.execute(query, parms).fetchall()]
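
# A minimal usage sketch of the lookup defined above; the repository
# instance, tenant id, and filter values here are hypothetical.
def _example_get_alarm_definitions(repo):
    defs = repo.get_alarm_definitions(
        tenant_id='abc123',
        dimensions={'hostname': 'web-01'},
        severity='HIGH|CRITICAL',
        limit=50)
    for d in defs:
        print(d['id'], d['name'], d['severity'])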
Ejemplo n.º 44
0
from sqlalchemy import Column, ForeignKey, Index, MetaData, Table, \
                       null, text, \
                       Boolean, DateTime, LargeBinary, String
from sqlalchemy.dialects.postgresql import JSONB, UUID

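# SQLAlchemy clause constructs are immutable, so a single shared instance is
# safe to reuse across many column definitions.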
SQL_NULL = null()
SQL_UTC = text("timezone('UTC'::text, CURRENT_TIMESTAMP)")
SQL_UUID = text("gen_random_uuid()")  # Require pgcrypto

metadata = MetaData()

t_user_info = Table(
    "user_info",
    metadata,
    Column("uid", String, primary_key=True),
    Column("tstamp", DateTime, nullable=False, server_default=SQL_UTC),
    Column("last_auth", DateTime, nullable=False, server_default=SQL_UTC),
)

t_document_hist = Table(
    "document_hist",
    metadata,
    Column("hid", UUID, primary_key=True, server_default=SQL_UUID),
    Column("pid", String, nullable=False),
    Column("title", String, nullable=False),
    Column("metadata", JSONB, nullable=False, server_default=text("'{}'")),
    Column("published", Boolean, server_default=text("false")),
    Column("tstamp", DateTime),
)

t_document_event = Table(
Ejemplo n.º 45
0
    def update_or_patch_alarm_definition(self,
                                         tenant_id,
                                         alarm_definition_id,
                                         name,
                                         expression,
                                         sub_expr_list,
                                         actions_enabled,
                                         description,
                                         alarm_actions,
                                         ok_actions,
                                         undetermined_actions,
                                         match_by,
                                         severity,
                                         patch=False):

        with self._db_engine.begin() as conn:
            original_row = self._get_alarm_definition(conn, tenant_id,
                                                      alarm_definition_id)
            rows = self._get_sub_alarm_definitions(conn, alarm_definition_id)

            old_sub_alarm_defs_by_id = {}

            for row in rows:
                sad = sub_alarm_definition.SubAlarmDefinition(row=row)
                old_sub_alarm_defs_by_id[sad.id] = sad

            if expression:
                (changed_sub_alarm_defs_by_id, new_sub_alarm_defs_by_id,
                 old_sub_alarm_defs_by_id, unchanged_sub_alarm_defs_by_id
                 ) = self._determine_sub_expr_changes(
                     alarm_definition_id, old_sub_alarm_defs_by_id,
                     sub_expr_list)

                if old_sub_alarm_defs_by_id or new_sub_alarm_defs_by_id:
                    new_count = (len(new_sub_alarm_defs_by_id) +
                                 len(changed_sub_alarm_defs_by_id) +
                                 len(unchanged_sub_alarm_defs_by_id))
                    old_count = len(old_sub_alarm_defs_by_id)
                    if new_count != old_count:
                        msg = 'number of subexpressions must not change'
                    else:
                        msg = 'metrics in subexpression must not change'
                    raise exceptions.InvalidUpdateException(msg.encode('utf8'))
            else:
                unchanged_sub_alarm_defs_by_id = old_sub_alarm_defs_by_id
                changed_sub_alarm_defs_by_id = {}
                new_sub_alarm_defs_by_id = {}
                old_sub_alarm_defs_by_id = {}

            # Get a common update time
            now = datetime.datetime.utcnow()

            if name is None:
                new_name = original_row['name']
            else:
                new_name = name.encode('utf-8') if six.PY2 else name

            if description is None:
                if patch:
                    new_description = original_row['description']
                else:
                    new_description = ''
            else:
                new_description = description.encode(
                    'utf-8') if six.PY2 else description

            if expression is None:
                new_expression = original_row['expression']
            else:
                new_expression = expression.encode(
                    'utf8') if six.PY2 else expression

            if severity is None:
                if patch:
                    new_severity = original_row['severity']
                else:
                    new_severity = 'LOW'
            else:
                new_severity = severity.encode('utf8') if six.PY2 else severity

            if match_by is None:
                if patch:
                    new_match_by = original_row['match_by']
                else:
                    new_match_by = None
            else:
                match = ",".join(match_by)
                new_match_by = match.encode('utf8') if six.PY2 else match

            if new_match_by != original_row['match_by']:
                msg = u"match_by must not change"
                raise exceptions.InvalidUpdateException(msg)

            if actions_enabled is None:
                new_actions_enabled = original_row['actions_enabled']
            else:
                new_actions_enabled = actions_enabled

            conn.execute(
                self.update_or_patch_alarm_definition_update_ad_query.values(
                    name=bindparam('b_name'),
                    description=bindparam('b_description'),
                    expression=bindparam('b_expression'),
                    match_by=bindparam('b_match_by'),
                    severity=bindparam('b_severity'),
                    actions_enabled=bindparam('b_actions_enabled'),
                    updated_at=bindparam('b_updated_at')),
                b_name=new_name,
                b_description=new_description,
                b_expression=new_expression,
                b_match_by=new_match_by,
                b_severity=new_severity,
                b_actions_enabled=bool(new_actions_enabled),
                b_updated_at=now,
                b_tenant_id=tenant_id,
                b_id=alarm_definition_id)
            parms = []
            for sub_alarm_definition_id, sub_alarm_def in (
                    changed_sub_alarm_defs_by_id.items()):
                parms.append({
                    'b_operator': sub_alarm_def.operator,
                    'b_threshold': sub_alarm_def.threshold,
                    'b_is_deterministic': sub_alarm_def.deterministic,
                    'b_updated_at': now,
                    'b_id': sub_alarm_definition_id
                })
            if len(parms) > 0:
                query = self.update_or_patch_alarm_definition_update_sad_query
                conn.execute(query, parms)

            # Delete old alarm actions
            if patch:
                if alarm_actions is not None:
                    self._delete_alarm_actions(conn, alarm_definition_id,
                                               'ALARM')
                if ok_actions is not None:
                    self._delete_alarm_actions(conn, alarm_definition_id, 'OK')
                if undetermined_actions is not None:
                    self._delete_alarm_actions(conn, alarm_definition_id,
                                               'UNDETERMINED')
            else:
                conn.execute(self.delete_aa_query,
                             b_alarm_definition_id=alarm_definition_id)

            # Insert new alarm actions
            self._insert_into_alarm_action(conn, alarm_definition_id,
                                           alarm_actions, u"ALARM")

            self._insert_into_alarm_action(conn, alarm_definition_id,
                                           undetermined_actions,
                                           u"UNDETERMINED")

            self._insert_into_alarm_action(conn, alarm_definition_id,
                                           ok_actions, u"OK")

            ad = self.ad_s
            query = (self.base_query.select_from(self.base_query_from).where(
                ad.c.tenant_id == bindparam('b_tenant_id')).where(
                    ad.c.id == bindparam('b_id')).where(
                        ad.c.deleted_at == null()))

            updated_row = conn.execute(query,
                                       b_id=alarm_definition_id,
                                       b_tenant_id=tenant_id).fetchone()

            if updated_row is None:
                raise Exception("Failed to find current alarm definition")

            sub_alarm_defs_dict = {
                'old': old_sub_alarm_defs_by_id,
                'changed': changed_sub_alarm_defs_by_id,
                'new': new_sub_alarm_defs_by_id,
                'unchanged': unchanged_sub_alarm_defs_by_id
            }

            # Return the alarm def and the sub alarm defs
            return updated_row, sub_alarm_defs_dict
Ejemplo n.º 46
0
def available_strain(term):
    '''Generate query for available strain'''
    return db.session.query(distinct(Taxa.strain), null()) \
             .filter(Taxa.strain.ilike('{}%'.format(term))) \
             .order_by(Taxa.strain)
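
# The null() second column gives each available_* helper the same two-column
# row shape, so different lookups can be unioned or handled uniformly by one
# autocomplete endpoint. A hypothetical sketch:
def _example_autocomplete(term):
    # Only the first column carries data; the second is always NULL.
    return [row[0] for row in available_strain(term).limit(10)]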
Ejemplo n.º 47
0
    def data(self,
             use_choice_labels=False,
             expand_collections=False,
             ignore_private=True):

        session = self.db_session

        AliquotLocation = aliased(lims.Location)
        SpecimenLocation = aliased(lims.Location)

        query = (session.query(
            lims.AliquotType.title.label('aliquot_type'),
            lims.Aliquot.store_date.label('store_date'),
            lims.Aliquot.amount.label('amount'),
            lims.AliquotType.units.label('units'),
            AliquotLocation.title.label('aliquot_location'),
            lims.Aliquot.freezer.label('freezer'),
            lims.Aliquot.rack.label('rack'), lims.Aliquot.box.label('box'),
            func.count().label('aliquot_count'),
            lims.AliquotState.title.label('aliquot_state'),
            lims.Aliquot.sent_date.label('sent_date'),
            lims.Aliquot.sent_name.label('sent_name'),
            lims.Aliquot.sent_notes.label('sent_notes'),
            lims.Aliquot.thawed_num.label('thawed_num'),
            lims.Aliquot.inventory_date.label('inventory_date'),
            lims.Aliquot.notes.label('aliquot_notes'),
            lims.SpecimenType.title.label('specimen_type'),
            lims.Specimen.collect_time.label('collect_time'),
            lims.Specimen.collect_date.label('collect_date'),
            studies.Study.title.label('specimen_study'),
            studies.Cycle.week.label('specimen_cycle'),
            SpecimenLocation.title.label('specimen_destination'),
            lims.SpecimenState.title.label('specimen_state'),
            lims.Specimen.tubes.label('tubes'),
            lims.SpecimenType.tube_type.label('tube_type'),
            lims.Specimen.notes.label('specimen_notes'),
            studies.Site.name.label('site'), studies.Patient.pid.label('pid'),
            studies.Patient.pid.label('our'),
            studies.Patient.nurse.label('nurse_email'),
            (session.query(studies.PatientReference.reference_number).join(
                studies.ReferenceType).filter(
                    studies.ReferenceType.name == u'aeh_num').filter(
                        studies.PatientReference.patient_id ==
                        studies.Patient.id).limit(1).correlate(
                            studies.Patient).as_scalar().label('aeh_num'))
        ).select_from(lims.Aliquot).join(lims.Aliquot.specimen).outerjoin(
            lims.Aliquot.aliquot_type).outerjoin(lims.Aliquot.state).outerjoin(
                AliquotLocation, lims.Aliquot.location).outerjoin(
                    SpecimenLocation, lims.Specimen.location).outerjoin(
                        lims.Specimen.cycle).outerjoin(
                            studies.Cycle.study).outerjoin(
                                lims.Specimen.specimen_type).outerjoin(
                                    lims.Specimen.state).
                 join(lims.Specimen.patient).join(studies.Patient.site).filter(
                     lims.AliquotState.title != u'Aliquot Not used').filter(
                         func.coalesce(lims.Aliquot.freezer, lims.Aliquot.rack,
                                       lims.Aliquot.box) != null()).
                 group_by(lims.AliquotType.title, lims.Aliquot.store_date,
                          lims.Aliquot.amount, lims.AliquotType.units,
                          AliquotLocation.title, lims.Aliquot.freezer,
                          lims.Aliquot.rack, lims.Aliquot.box,
                          lims.AliquotState.title, lims.Aliquot.sent_date,
                          lims.Aliquot.sent_name, lims.Aliquot.sent_notes,
                          lims.Aliquot.thawed_num, lims.Aliquot.inventory_date,
                          lims.Aliquot.notes, lims.SpecimenType.title,
                          lims.Specimen.collect_time,
                          lims.Specimen.collect_date, studies.Study.title,
                          studies.Cycle.week, SpecimenLocation.title,
                          lims.SpecimenState.title, lims.Specimen.tubes,
                          lims.SpecimenType.tube_type, lims.Specimen.notes,
                          studies.Patient.id, studies.Patient.pid,
                          studies.Patient.nurse, 'aeh_num', studies.Site.name))

        return query
Ejemplo n.º 48
0
def available_acc(term):
    '''Generate query for available accession'''
    return db.session.query(distinct(DnaSequence.acc), null()) \
             .filter(DnaSequence.acc.ilike('{}%'.format(term))) \
             .order_by(DnaSequence.acc)
Ejemplo n.º 49
0
def test_delete_fileset(api, users, location, es, fixtures_path,
                        test_metadata_format, task_delay):
    with api.test_request_context(), api.test_client() as client:
        client.post(
            url_for_security("login"),
            data={
                "email": users[0]["email"],
                "password": "******"
            },
        )

        # Create a deposit, initially with a metadata deposit
        original_response = client.post(
            url_for("invenio_sword.depid_service_document"),
            data=json.dumps({
                "@context":
                "https://swordapp.github.io/swordv3/swordv3.jsonld",
                "title": "A title",
            }),
            headers={
                "Content-Disposition": "attachment; metadata=true",
                "In-Progress": "true",
            },
        )

        assert ObjectVersion.query.count() == 1

        # Add some extra metadata
        response = client.post(
            original_response.json["metadata"]["@id"],
            data=b"some metadata",
            headers={"Metadata-Format": test_metadata_format},
        )
        assert response.status_code == HTTPStatus.NO_CONTENT

        assert ObjectVersion.query.count() == 2

        with open(os.path.join(fixtures_path, "bagit.zip"), "rb") as f:
            response = client.post(
                original_response.headers["Location"],
                data=f,
                headers={
                    "Packaging": SWORDBagItPackaging.packaging_name,
                    "Content-Type": "application/zip",
                    "In-Progress": "true",
                },
            )
            assert response.status_code == HTTPStatus.OK

        assert task_delay.call_count == 1
        task_self = task_delay.call_args[0][0]
        task_self.apply()

        # One test metadata, one old SWORD metadata, one new SWORD metadata, one original deposit, and two files
        assert ObjectVersion.query.count() == 6
        # One test metadata, one new SWORD metadata, one original deposit, and two files
        assert ObjectVersion.query.filter(
            ObjectVersion.is_head == true()).count() == 5

        # Now let's delete the fileset. This should ensure that there is only one extant file, as the SWORD metadata and
        # original deposit were deposited as part of a fileset, leaving only the test dataset
        response = client.delete(original_response.json["fileSet"]["@id"])
        assert response.status_code == HTTPStatus.NO_CONTENT

        # All four previously extant files now have file_id=NULL versions
        assert ObjectVersion.query.count() == 10
        assert (ObjectVersion.query.filter(
            ObjectVersion.is_head == true(),
            ObjectVersion.file_id != null()).count() == 1)
Ejemplo n.º 50
0
def available_compoundseq(term):
    '''Generate query for available compound by peptide sequence'''
    return db.session.query(distinct(Compound.peptide_sequence), null()) \
             .filter(Compound.peptide_sequence.ilike('{}%'.format(term))) \
             .order_by(Compound.peptide_sequence)
Ejemplo n.º 51
0
def getChildrenInfo(entry):
    # print("getChildrenInfo")
    if entry['area_type'] == 0:
        return []
    elif entry['area_type'] == 1:
        children_info = []
        children = db.session.query(AreaModel)\
            .filter(AreaModel.parent_id==entry['id'])\
            .filter(AreaModel.parent_name==entry['name'])\
            .all()
        # Query.all() always returns a list, so iterate over it directly
        for child in children:
            counts = countClimbs(child)
            children_info.append({
                'name': child.name,
                'route': f"/area/{child.id}/{child.name}",
                'counts': counts
            })
        return children_info

    elif entry['area_type'] == 2:
        sorted_info = []
        unsorted_info = []
        # Check boulders
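        # The sa.null() columns below pad the boulder rows so they remain
        # union-compatible with the wider route query further down.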
        boulders = db.session.query(BoulderModel.position.label('position'),\
            BoulderModel.climb_type.label('climb_type'),\
            BoulderModel.id.label('id'),\
            BoulderModel.name.label('name'),\
            BoulderModel.grade.label('grade'),\
            BoulderModel.quality.label('quality'),\
            BoulderModel.danger.label('danger'),\
            BoulderModel.height.label('height'),\
            sa.null().label('pitches'),\
            sa.null().label('committment'),\
            BoulderModel.fa.label('fa'),\
            BoulderModel.description.label('description'),\
            BoulderModel.pro.label('pro'),\
            BoulderModel.elevation.label('elevation'),\
            BoulderModel.lat.label('lat'),\
            BoulderModel.lng.label('lng'),\
            sa.null().label('route_type'))\
            .filter(BoulderModel.parent_id==entry['id']).filter(BoulderModel.parent_name==entry['name'])
        # Check routes
        routes = db.session.query(RouteModel.position.label('position'),\
            RouteModel.climb_type.label('climb_type'),\
            RouteModel.id.label('id'),\
            RouteModel.name.label('name'),\
            RouteModel.grade.label('grade'),\
            RouteModel.quality.label('quality'),\
            RouteModel.danger.label('danger'),\
            RouteModel.height.label('height'),\
            RouteModel.pitches.label('pitches'),\
            RouteModel.committment.label('committment'),\
            RouteModel.fa.label('fa'),\
            RouteModel.description.label('description'),\
            RouteModel.pro.label('pro'),\
            RouteModel.elevation.label('elevation'),\
            RouteModel.lat.label('lat'),\
            RouteModel.lng.label('lng'),\
            RouteModel.route_type.label('route_type'))\
            .filter(RouteModel.parent_id==entry['id']).filter(RouteModel.parent_name==entry['name'])
        # Union boulders + routes and sort
        q = boulders.union(routes)
        children = q.order_by('position')

        gradeByClimb_type = {
            'boulder': boulderInt2Grade,
            'route': routeInt2Grade
        }
        # Append info to corresponding lists
        for child in children:
            # (position, climb_type, id, name, grade, quality, danger, height, pitches, committment, fa, desc, pro, elev, lat, lng, route_type)
            child_entry = {
                'id': child[2],
                'name': child[3],
                'properties': {
                    'grade': gradeByClimb_type[child[1]](child[4]),
                    'route_type': route_types[child[16]],
                    'quality': child[5],
                    'danger': dangerInt2Movie[child[6]],
                    'height': child[7],
                    'pitches': child[8],
                    'committment': child[9],
                    'fa': child[10],
                    'description': child[11],
                    'pro': child[12],
                    'elevation': child[13],
                    'coords': {
                        'lat': child[14],
                        'lng': child[15]
                    }
                },
                'climb_type': child[1],
                'route': f"{child[1]}/{child[2]}/{child[3]}"
            }
            if child[0] == 0:
                print(f"Unsorted - {child[2]}")
                unsorted_info.append(child_entry)
            else:
                sorted_info.append(child_entry)

        return {'sorted': sorted_info, 'unsorted': unsorted_info}
Ejemplo n.º 52
0
def available_compoundclass(term):
    '''Generate query for available compound by class'''
    return db.session.query(distinct(Compound._class), null()) \
             .filter(Compound._class.ilike('{}%'.format(term))) \
             .order_by(Compound._class)
Ejemplo n.º 53
0
def fix_numeric_choice_names():
    # ``!= sa.null()`` compiles to IS NOT NULL, so the constraint holds only
    # when the name casts to a non-NULL integer value.
    op.create_check_constraint(
        'ck_choice_numeric_name', 'choice',
        sa.cast(sa.sql.column('name'), sa.Integer) != sa.null())
Ejemplo n.º 54
0
def available_superkingdom(term):
    '''Generate query for available superkingdoms'''
    return db.session.query(distinct(Taxa.superkingdom), null()) \
             .filter(Taxa.superkingdom.ilike('{}%'.format(term))) \
             .order_by(Taxa.superkingdom)
Ejemplo n.º 55
0
    def get_general_query(cls, change_filter=False, queue=False):
        criteria = []
        basic_filter = [
            cls.id == Name.nrId,
            cls.requestTypeCd.in_([
                EntityTypes.PRIVATE_ACT.value,
                EntityTypes.CORPORATION.value,
                LegacyEntityTypes.CORPORATION.CCR.value,
                LegacyEntityTypes.CORPORATION.CT.value,
                LegacyEntityTypes.CORPORATION.RCR.value,
                EntityTypes.COOPERATIVE.value,
                LegacyEntityTypes.COOPERATIVE.CCP.value,
                LegacyEntityTypes.COOPERATIVE.CTC.value,
                LegacyEntityTypes.COOPERATIVE.RCP.value,
                EntityTypes.FINANCIAL_INSTITUTION.value,
                LegacyEntityTypes.FINANCIAL_INSTITUTION.CFI.value,
                LegacyEntityTypes.FINANCIAL_INSTITUTION.RFI.value,
                EntityTypes.SOCIETY.value,
                LegacyEntityTypes.SOCIETY.ASO.value,
                LegacyEntityTypes.SOCIETY.CSO.value,
                LegacyEntityTypes.SOCIETY.CSSO.value,
                LegacyEntityTypes.SOCIETY.CTSO.value,
                LegacyEntityTypes.SOCIETY.RSO.value,
                EntityTypes.UNLIMITED_LIABILITY_COMPANY.value,
                LegacyEntityTypes.UNLIMITED_LIABILITY_COMPANY.UC.value,
                LegacyEntityTypes.UNLIMITED_LIABILITY_COMPANY.CUL.value,
                LegacyEntityTypes.UNLIMITED_LIABILITY_COMPANY.ULCT.value,
                LegacyEntityTypes.UNLIMITED_LIABILITY_COMPANY.RUL.value,
                EntityTypes.XPRO_SOCIETY.value,
                LegacyEntityTypes.XPRO_SOCIETY.XASO.value,
                LegacyEntityTypes.XPRO_SOCIETY.XCASO.value,
                LegacyEntityTypes.XPRO_SOCIETY.XCSO.value,
                LegacyEntityTypes.XPRO_SOCIETY.XRSO.value,
                EntityTypes.COMMUNITY_CONTRIBUTION_COMPANY.value,
                LegacyEntityTypes.COMMUNITY_CONTRIBUTION_COMPANY.CC.value,
                LegacyEntityTypes.COMMUNITY_CONTRIBUTION_COMPANY.CCV.value,
                LegacyEntityTypes.COMMUNITY_CONTRIBUTION_COMPANY.CCCT.value,
                LegacyEntityTypes.COMMUNITY_CONTRIBUTION_COMPANY.RCC.value,
                EntityTypes.PARISH.value,
                EntityTypes.XPRO_CORPORATION.value,
                LegacyEntityTypes.XPRO_CORPORATION.XCCR.value,
                LegacyEntityTypes.XPRO_CORPORATION.XRCR.value,
                LegacyEntityTypes.XPRO_CORPORATION.AS.value,
                EntityTypes.XPRO_UNLIMITED_LIABILITY_COMPANY.value,
                LegacyEntityTypes.XPRO_UNLIMITED_LIABILITY_COMPANY.UA.value,
                LegacyEntityTypes.XPRO_UNLIMITED_LIABILITY_COMPANY.XCUL.value,
                LegacyEntityTypes.XPRO_UNLIMITED_LIABILITY_COMPANY.XRUL.value,
                EntityTypes.XPRO_COOPERATIVE.value,
                LegacyEntityTypes.XPRO_COOPERATIVE.XCCP.value,
                LegacyEntityTypes.XPRO_COOPERATIVE.XRCP.value,
                EntityTypes.BENEFIT_COMPANY.value
            ] if not change_filter else [
                EntityTypes.PRIVATE_ACT.value,
                EntityTypes.CORPORATION.value,
                LegacyEntityTypes.CORPORATION.CCR.value,
                LegacyEntityTypes.CORPORATION.RCR.value,
                EntityTypes.COOPERATIVE.value,
                LegacyEntityTypes.COOPERATIVE.CCP.value,
                LegacyEntityTypes.COOPERATIVE.RCP.value,
                EntityTypes.FINANCIAL_INSTITUTION.value,
                LegacyEntityTypes.FINANCIAL_INSTITUTION.CFI.value,
                LegacyEntityTypes.FINANCIAL_INSTITUTION.RFI.value,
                LegacyEntityTypes.SOCIETY.ASO.value,
                LegacyEntityTypes.SOCIETY.CSO.value,
                LegacyEntityTypes.SOCIETY.RSO.value,
                EntityTypes.UNLIMITED_LIABILITY_COMPANY.value,
                LegacyEntityTypes.UNLIMITED_LIABILITY_COMPANY.UC.value,
                LegacyEntityTypes.UNLIMITED_LIABILITY_COMPANY.CUL.value,
                LegacyEntityTypes.UNLIMITED_LIABILITY_COMPANY.RUL.value,
                EntityTypes.COMMUNITY_CONTRIBUTION_COMPANY.value,
                LegacyEntityTypes.COMMUNITY_CONTRIBUTION_COMPANY.CC.value,
                LegacyEntityTypes.COMMUNITY_CONTRIBUTION_COMPANY.RCC.value,
                EntityTypes.PARISH.value,
                EntityTypes.BENEFIT_COMPANY.value
            ]),
        ]

        queue_request_state_filter = [
            cls.stateCd.in_([State.DRAFT, State.HOLD, State.INPROGRESS])
        ]

        corp_request_state_filter = [
            cls.stateCd.in_([
                State.APPROVED, State.CONDITIONAL, State.COND_RESERVE,
                State.RESERVED
            ]),
        ]

        name_state_filter = [
            Name.state.in_([
                NameState.APPROVED.value, NameState.CONDITION.value,
                NameState.RESERVED.value, NameState.COND_RESERVE.value
            ])
        ]

        not_consumed_filter = [
            cls.expirationDate > func.current_date(),
            Name.corpNum.is_(None),
            Name.consumptionDate.is_(None)
        ]

        consumed_filter = [
            Name.corpNum.isnot(None),
        ]

        if queue:
            criteria.append(
                RequestConditionCriteria(
                    fields=[
                        Name.name,
                        sqlalchemy.null().label('consumptionDate'),
                        cls.submittedDate,
                        sqlalchemy.null().label('corpNum'), cls.nrNum
                    ],
                    filters=[basic_filter, queue_request_state_filter]))
        else:
            criteria.append(
                RequestConditionCriteria(fields=[
                    Name.name, Name.consumptionDate,
                    sqlalchemy.null().label('submittedDate'), Name.corpNum,
                    sqlalchemy.null().label('nrNum')
                ],
                                         filters=[
                                             basic_filter,
                                             corp_request_state_filter,
                                             name_state_filter, consumed_filter
                                         ]))
            criteria.append(
                RequestConditionCriteria(fields=[
                    Name.name,
                    sqlalchemy.null().label('consumptionDate'),
                    cls.submittedDate,
                    sqlalchemy.null().label('corpNum'), cls.nrNum
                ],
                                         filters=[
                                             basic_filter,
                                             corp_request_state_filter,
                                             name_state_filter,
                                             not_consumed_filter
                                         ]))

        return criteria
Ejemplo n.º 56
0
def available_phylum(term):
    '''Generate query for available phyla'''
    return db.session.query(distinct(Taxa.phylum), null()) \
             .filter(Taxa.phylum.ilike('{}%'.format(term))) \
             .order_by(Taxa.phylum)
Ejemplo n.º 57
0
 ops.Count: _reduction(sa.func.count),
 ops.Sum: _reduction(sa.func.sum),
 ops.Mean: _reduction(sa.func.avg),
 ops.Min: _reduction(sa.func.min),
 ops.Max: _reduction(sa.func.max),
 ops.CountDistinct: _count_distinct,
 ops.GroupConcat: _reduction(sa.func.group_concat),
 ops.Between: fixed_arity(sa.between, 3),
 ops.IsNull: _is_null,
 ops.NotNull: _not_null,
 ops.Negate: _negate,
 ops.Round: _round,
 ops.TypeOf: unary(sa.func.typeof),
 ops.Literal: _literal,
 ops.ValueList: _value_list,
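 # a NULL literal takes no operands, hence the ignored *args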
 ops.NullLiteral: lambda *args: sa.null(),
 ops.SimpleCase: _simple_case,
 ops.SearchedCase: _searched_case,
 ops.TableColumn: _table_column,
 ops.TableArrayView: _table_array_view,
 transforms.ExistsSubquery: _exists_subquery,
 transforms.NotExistsSubquery: _exists_subquery,
 # miscellaneous varargs
 ops.Least: varargs(sa.func.least),
 ops.Greatest: varargs(sa.func.greatest),
 # string
 ops.LPad: fixed_arity(sa.func.lpad, 3),
 ops.RPad: fixed_arity(sa.func.rpad, 3),
 ops.Strip: unary(sa.func.trim),
 ops.LStrip: unary(sa.func.ltrim),
 ops.RStrip: unary(sa.func.rtrim),
Ejemplo n.º 58
0
def available_order(term):
    '''Generate query for available order'''
    return db.session.query(distinct(Taxa.taxonomic_order), null()) \
             .filter(Taxa.taxonomic_order.ilike('{}%'.format(term))) \
             .order_by(Taxa.taxonomic_order)
Ejemplo n.º 59
0
    """
    send a request for a host with no tags
    """
    created_hosts = mq_create_four_specific_hosts

    host_with_no_tags = created_hosts[3]
    expected_response = {host_with_no_tags.id: []}

    url = build_host_tags_url(host_list_or_id=host_with_no_tags.id)
    response_status, response_data = api_get(url)

    assert response_status == 200
    assert len(expected_response) == len(response_data["results"])


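# Cover both a Python None (coerced to SQL NULL by the ORM) and an explicit
# null() construct; either way the tags column should come back empty.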
@pytest.mark.parametrize("tags", (None, null()))
def test_get_tags_from_host_with_null_tags(tags, mq_create_four_specific_hosts, api_get):
    # FIXME: Remove this test after migration to NOT NULL.
    created_hosts = mq_create_four_specific_hosts

    host_id = created_hosts[0].id
    update_host_in_db(host_id, tags=tags)

    url = build_host_tags_url(host_list_or_id=host_id)
    response_status, response_data = api_get(url)

    assert response_status == 200
    assert {host_id: []} == response_data["results"]


@pytest.mark.parametrize("tags", (None, null()))
Ejemplo n.º 60
0
def available_family(term):
    '''Generate query for available family'''
    return db.session.query(distinct(Taxa.family), null()) \
             .filter(Taxa.family.ilike('{}%'.format(term))) \
             .order_by(Taxa.family)