Code Example #1
def run():
    
    
    activecnt = meta.Session.query(
                NotamUpdate.appearnotam).filter(
                    NotamUpdate.disappearnotam==None).distinct().subquery()
    
    now = datetime.utcnow()
    cutoff = now - timedelta(days=14)
    
    
    ncnt=meta.Session.query(Notam).filter(
                    sa.and_(
                        sa.not_(sa.exists().where(sa.and_(
                            Notam.ordinal==NotamUpdate.appearnotam,
                            NotamUpdate.disappearnotam==None))),
                        Notam.downloaded<cutoff
                    )).delete(False)

    ucnt=meta.Session.query(NotamUpdate).filter(
                    sa.and_(
                        sa.exists().where(
                            sa.and_(
                                Notam.downloaded<cutoff,
                                Notam.ordinal==NotamUpdate.disappearnotam)),
                        NotamUpdate.disappearnotam!=None
                    )).delete(False)
    print "Deleted %d notams, and %d notamupdates"%(ncnt,ucnt)
    meta.Session.flush()
    meta.Session.commit()
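
The positional False passed to .delete(False) above is the synchronize_session argument. A minimal, runnable sketch of the same NOT EXISTS anti-join delete, assuming SQLAlchemy 1.4 and hypothetical Parent/Child models (not part of the example):

import sqlalchemy as sa
from sqlalchemy.orm import declarative_base, Session

Base = declarative_base()

class Parent(Base):
    __tablename__ = 'parent'
    id = sa.Column(sa.Integer, primary_key=True)

class Child(Base):
    __tablename__ = 'child'
    id = sa.Column(sa.Integer, primary_key=True)
    parent_id = sa.Column(sa.Integer)

engine = sa.create_engine('sqlite://')
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add_all([Parent(id=1), Parent(id=2), Child(id=1, parent_id=1)])
    session.commit()
    # Compiles to: DELETE FROM parent WHERE NOT (EXISTS
    #   (SELECT * FROM child WHERE child.parent_id = parent.id))
    deleted = session.query(Parent).filter(
        ~sa.exists().where(Child.parent_id == Parent.id)
    ).delete(synchronize_session=False)
    session.commit()
    print(deleted)  # 1, only the orphaned Parent(id=2) matches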
Code Example #2
def upgrade():
    conn = op.get_bind()

    cocols = [
        'created', 'updated', 'active', 'version', 'polymorphic_type',
        'id', 'name',
    ]
    co = sa.table('contribution', *map(sa.column, ['pk'] + cocols))
    chcols = ['pk', 'sortkey']
    ch = sa.table('chapter', *map(sa.column, chcols))

    id_, name = map(sa.bindparam, ['id_', 'name'])

    cowhere = [co.c.id == id_, co.c.name == name]

    insert_co = co.insert(bind=conn).from_select(cocols,
        sa.select([sa.func.now(), sa.func.now(), True, 1, sa.literal('custom'), id_, name])
        .where(~sa.exists().where(sa.or_(*cowhere))))

    co_pk = sa.select([co.c.pk]).where(sa.and_(*cowhere)).as_scalar()
    sortkey = sa.bindparam('sortkey')

    insert_ch = ch.insert(bind=conn).from_select(chcols,
        sa.select([co_pk, sortkey])
        .where(~sa.exists().where(ch.c.pk == co.c.pk).where(sa.or_(*cowhere)))
        .where(sa.exists().where(sa.and_(*cowhere))))

    insert_co.execute(id_=ID, name=NAME)
    insert_ch.execute(id_=ID, name=NAME, sortkey=SORTKEY)
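
Code Example #2 builds on the INSERT ... FROM SELECT ... WHERE NOT EXISTS idiom, which makes a migration step safe to re-run. A reduced, runnable sketch of that idiom with a hypothetical tag table, written in the same 1.x-style Core API as the examples:

import sqlalchemy as sa

engine = sa.create_engine('sqlite://')
meta = sa.MetaData()
tag = sa.Table('tag', meta,
               sa.Column('pk', sa.Integer, primary_key=True),
               sa.Column('name', sa.String))
meta.create_all(engine)

name = sa.bindparam('name')
insert_tag = tag.insert().from_select(
    ['name'],
    sa.select([name]).where(~sa.exists().where(tag.c.name == name)))

with engine.begin() as conn:
    conn.execute(insert_tag, {'name': 'custom'})  # inserts the row
    conn.execute(insert_tag, {'name': 'custom'})  # second run inserts nothing
    print(conn.scalar(sa.select([sa.func.count()]).select_from(tag)))  # 1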
Code Example #3
File: ddtss.py, Project: fpirola/DDTSS-Django
    def get_next_to_translate(self, session):
        """ Use the milestones and priority to find the next description to translate """
        # This is the query we want:
        # select description_id from (
        #         select description_id, 50 as score from languages_tb join description_milestone_tb on (milestone_high=milestone) where language = 'nl'
        # union all
        #         select description_id, 30 from languages_tb join description_milestone_tb on (milestone_medium=milestone) where language = 'nl'
        # union all
        #         select description_id, 10 from languages_tb join description_milestone_tb on (milestone_low=milestone) where language = 'nl'
        # union all
        #         select description_id, prioritize from description_tb
        # ) x
        # where not exists (select 1 from translation_tb where translation_tb.description_id = x.description_id)
        # group by description_id order by sum(score) desc
        # limit 1;
        lang_cte = session.query(Languages).filter_by(language=self.language).cte("language")

        prio = session.query(Description.description_id, Description.prioritize)
        high = session.query(DescriptionMilestone.description_id, literal(50).label("prioritize")).join(lang_cte, lang_cte.c.milestone_high==DescriptionMilestone.milestone)
        medium = session.query(DescriptionMilestone.description_id, literal(30).label("prioritize")).join(lang_cte, lang_cte.c.milestone_medium==DescriptionMilestone.milestone)
        low = session.query(DescriptionMilestone.description_id, literal(10).label("prioritize")).join(lang_cte, lang_cte.c.milestone_low==DescriptionMilestone.milestone)

        prio_cte = union_all(prio, high, medium, low).cte()

        q = session.query(prio_cte.c.description_tb_description_id). \
                    filter(~exists([1], Translation.description_id == prio_cte.c.description_tb_description_id)). \
                    filter(~exists([1], PendingTranslation.description_id == prio_cte.c.description_tb_description_id)). \
                    group_by(prio_cte.c.description_tb_description_id). \
                    order_by(func.sum(prio_cte.c.description_tb_prioritize).desc())

        row = q.first()
        if row:
            return row[0]
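
The commented SQL in Code Example #3 decomposes into three pieces: a UNION ALL of per-source scores collected into a CTE, a NOT EXISTS anti-join against the translations, and ORDER BY over the summed score. A condensed sketch of the same shape, using hypothetical Core tables rather than the ORM models above:

import sqlalchemy as sa

meta = sa.MetaData()
description = sa.Table('description', meta,
                       sa.Column('id', sa.Integer, primary_key=True),
                       sa.Column('prioritize', sa.Integer))
translation = sa.Table('translation', meta,
                       sa.Column('description_id', sa.Integer))

# Each branch contributes (description_id, score); the CTE's column
# names come from the labels in the first SELECT.
scores = sa.union_all(
    sa.select([description.c.id.label('description_id'),
               description.c.prioritize.label('score')]),
    sa.select([description.c.id.label('description_id'),
               sa.literal(50).label('score')]),
).cte('scores')

next_to_translate = (
    sa.select([scores.c.description_id])
    .where(~sa.exists().where(
        translation.c.description_id == scores.c.description_id))
    .group_by(scores.c.description_id)
    .order_by(sa.func.sum(scores.c.score).desc())
    .limit(1))

print(next_to_translate)  # renders the WITH ... NOT EXISTS ... GROUP BY query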
Code Example #4
    def countFilterOnDynProp(self,fullQuery,criteria):
        curDynProp = self.GetDynProp(criteria['Column'])
        if curDynProp is None :
            return fullQuery

        if 'date' in curDynProp['TypeProp'].lower() :
            try:
                criteria['Value'] = parse(criteria['Value'].replace(' ',''))
            except: pass

        filterCriteria = eval_.eval_binary_expr(self.GetDynPropValueView().c['Value'+curDynProp['TypeProp']],criteria['Operator'],criteria['Value'] )
        if filterCriteria is not None :
            existQuery = select([self.GetDynPropValueView()])
            existQuery = existQuery.where(
                and_(
                self.GetDynPropValueView().c['Name'] == criteria['Column'],
                filterCriteria
                ))
            existQuery = existQuery.where(self.ObjWithDynProp.ID == self.GetDynPropValueView().c[self.ObjWithDynProp().GetSelfFKNameInValueTable()])
            # fullQuery = fullQuery.where(exists(
            #     existQuery.where(self.ObjWithDynProp.ID == self.GetDynPropValueView().c[self.ObjWithDynProp().GetSelfFKNameInValueTable()])))

        if curDynProp is not None and criteria['Operator'].lower() in ['is','is not'] and criteria['Value'].lower() == 'null':
            nullExistQuery = select([self.GetDynPropValueView()])
            nullExistQuery = nullExistQuery.where(
                and_(self.GetDynPropValueView().c['Name'] == criteria['Column']
                    ,self.ObjWithDynProp.ID == self.GetDynPropValueView().c[self.ObjWithDynProp().GetSelfFKNameInValueTable()])
                )
            fullQuery = fullQuery.where(or_(~exists(nullExistQuery),exists(existQuery)))
        else : 
            fullQuery = fullQuery.where(exists(existQuery)) 
        return fullQuery
Code Example #5
File: models.py, Project: closears/payment_manager
 def total_standard(cls, person, period):
     return select([func.sum(Standard.money).label('money')]).where(
         exists().where(and_(
             Standard.id == cls.standard_id,
             exists().where(and_(
                 cls.person_id == person.id,
                 cls.effective_before(period)))))).c.money
Code Example #6
File: List.py, Project: FredericBerton/ecoReleve-Data
    def whereInEquipement(self,fullQueryJoin,criteria):
        sensorObj = list(filter(lambda x:'FK_Sensor'==x['Column'], criteria))[0]
        sensor = sensorObj['Value']

        table = Base.metadata.tables['MonitoredSiteEquipment']
        joinTable = outerjoin(table,Sensor, table.c['FK_Sensor'] == Sensor.ID)

        if sensorObj['Operator'].lower() in ['is','is not'] and sensorObj['Value'].lower() == 'null':
            subSelect = select([table.c['FK_MonitoredSite']]
                ).select_from(joinTable).where(
                and_(MonitoredSite.ID== table.c['FK_MonitoredSite']
                    ,or_(table.c['EndDate'] >= func.now(),table.c['EndDate'] == None)
                        ))
            if sensorObj['Operator'].lower() == 'is':
                fullQueryJoin = fullQueryJoin.where(~exists(subSelect))
            else :
                fullQueryJoin = fullQueryJoin.where(exists(subSelect))
        else :
            subSelect = select([table.c['FK_MonitoredSite']]
                ).select_from(joinTable).where(
                and_(MonitoredSite.ID== table.c['FK_MonitoredSite']
                    ,and_(eval_.eval_binary_expr(Sensor.UnicIdentifier,sensorObj['Operator'],sensor)
                        ,or_(table.c['EndDate'] >= func.now(),table.c['EndDate'] == None))
                        ))
            fullQueryJoin = fullQueryJoin.where(exists(subSelect))
        return fullQueryJoin
Code Example #7
def upgrade():
    conn = op.get_bind()

    l = sa.table('language', *map(sa.column, ['pk', 'id', 'name']))
    ccols = ['created', 'updated', 'active', 'id', 'name', 'continent']
    c = sa.table('country', *map(sa.column, ['pk'] + ccols))
    lccols = ['created', 'updated', 'active', 'language_pk', 'country_pk']
    lc = sa.table('countrylanguage', *map(sa.column, lccols))

    lwhere = (l.c.id == sa.bindparam('id_'))

    cid, cname, ccont = map(sa.bindparam, ['cc', 'name', 'continent'])
    cwhere = (c.c.id == cid)

    insert_c = c.insert(bind=conn).from_select(ccols,
        sa.select([sa.func.now(), sa.func.now(), True, cid, cname, ccont])
        .where(~sa.exists().where(cwhere)))

    liwhere = sa.exists()\
        .where(lc.c.language_pk == l.c.pk).where(lwhere)\
        .where(lc.c.country_pk == c.c.pk).where(cwhere)

    unlink_country = lc.delete(bind=conn).where(liwhere)

    l_pk = sa.select([l.c.pk]).where(lwhere).as_scalar()
    c_pk = sa.select([c.c.pk]).where(cwhere).as_scalar()

    link_country = lc.insert(bind=conn).from_select(lccols,
        sa.select([sa.func.now(), sa.func.now(), True, l_pk, c_pk])
        .where(~liwhere))

    insert_c.execute(cc=AFTER, name=NAME, continent=CONTINENT)
    for id_ in IDS:
        unlink_country.execute(id_=id_, cc=BEFORE)
        link_country.execute(id_=id_, cc=AFTER)
Code Example #8
File: List.py, Project: ktalbi/ecoReleve-Data
    def whereInEquipement(self,fullQueryJoin,criteria):
        sensorObj = list(filter(lambda x:'FK_Sensor'==x['Column'], criteria))[0]
        sensor = sensorObj['Value']

        table = Base.metadata.tables['IndividualEquipment']
        joinTable = outerjoin(table,Sensor, table.c['FK_Sensor'] == Sensor.ID)
        startDate = datetime.now()

        if self.startDate :
            startDate = self.startDate

        subSelect= select([table.c['FK_Individual']]
            ).select_from(joinTable).where(Individual.ID== table.c['FK_Individual']).where(table.c['StartDate'] <= startDate)

        if sensorObj['Operator'].lower() in ['is null','is not null'] :
            if not self.history :
                subSelect = subSelect.where(or_(table.c['EndDate'] >= startDate,table.c['EndDate'] == None))

        else:
            subSelect = subSelect.where(eval_.eval_binary_expr(Sensor.UnicIdentifier,sensorObj['Operator'],sensor))
            if not self.history : 
                subSelect = subSelect.where(or_(table.c['EndDate'] >= startDate,table.c['EndDate'] == None))

        if  'is not' in sensorObj['Operator'].lower():
            if sensorObj['Operator'].lower() == 'is not null' :
                fullQueryJoin = fullQueryJoin.where(exists(subSelect))
            else :
                fullQueryJoin = fullQueryJoin.where(~exists(subSelect))
        else :
            if sensorObj['Operator'].lower() == 'is null' :
                fullQueryJoin = fullQueryJoin.where(~exists(subSelect))
            else:
                fullQueryJoin = fullQueryJoin.where(exists(subSelect))
        return fullQueryJoin
Code Example #9
File: user.py, Project: lukasjuhrich/pycroft
    def has_property(self, prop):
        property_granted_select = select(
            [null()],
            from_obj=[
                Property.__table__,
                PropertyGroup.__table__,
                Membership.__table__
            ]
        ).where(
            and_(
                Property.name == prop,
                Property.property_group_id == PropertyGroup.id,
                PropertyGroup.id == Membership.group_id,
                Membership.user_id == self.id,
                Membership.active
            )
        )
        #.cte("property_granted_select")
        return and_(
            not_(exists(
                property_granted_select.where(
                    Property.granted == false())

            )),
            exists(
                property_granted_select.where(
                    Property.granted == true()
                )
            )
        )
Code Example #10
File: verifyrequest.py, Project: Mango-J/pushmanager
 def post(self):
     if not self.current_user:
         return self.send_error(403)
     self.requestid = pushmanager.core.util.get_int_arg(self.request, 'id')
     self.pushid = pushmanager.core.util.get_int_arg(self.request, 'push')
     select_query = db.push_pushes.select().where(
         db.push_pushes.c.id == self.pushid,
     )
     update_query = db.push_requests.update().where(SA.and_(
         db.push_requests.c.state == 'staged',
         db.push_requests.c.id == self.requestid,
         SA.exists(
             [1],
             SA.and_(
                 db.push_pushcontents.c.push == self.pushid,
                 db.push_pushcontents.c.request == self.requestid,
             )
         ))).values({
             'state': 'verified',
         })
     finished_query = db.push_requests.select().where(SA.and_(
         db.push_requests.c.state == 'staged',
         SA.exists(
             [1],
             SA.and_(
                 db.push_pushcontents.c.push == self.pushid,
                 db.push_pushcontents.c.request == db.push_requests.c.id,
             )
         )))
     db.execute_transaction_cb([select_query, update_query, finished_query], self.on_db_complete)
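
A note on the calling convention above: SA.exists([1], SA.and_(...)) is the legacy columns-plus-whereclause signature of exists(), deprecated in SQLAlchemy 1.4 and removed in 2.0. Under 1.x the two spellings below should compile to the same EXISTS (SELECT 1 FROM ... WHERE ...); the table and values are illustrative:

import sqlalchemy as sa

contents = sa.table('push_pushcontents', sa.column('push'), sa.column('request'))

# Legacy form: columns list and WHERE criterion as positional arguments.
legacy = sa.exists([1], sa.and_(contents.c.push == 7, contents.c.request == 42))
# Generative form: same construct built with .where().
modern = sa.exists([1]).where(
    sa.and_(contents.c.push == 7, contents.c.request == 42))
print(legacy)
print(modern)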
Code Example #11
File: user.py, Project: agdsn/pycroft
    def has_property(cls, prop, when=None):
        # TODO Use joins
        property_granted_select = select(
            [null()],
            from_obj=[
                Property.__table__,
                PropertyGroup.__table__,
                Membership.__table__
            ]
        ).where(
            and_(
                Property.name == prop,
                Property.property_group_id == PropertyGroup.id,
                PropertyGroup.id == Membership.group_id,
                Membership.user_id == cls.id,
                Membership.active(when)
            )
        )
        #.cte("property_granted_select")
        return and_(
            not_(exists(
                property_granted_select.where(
                    Property.granted == false())

            )),
            exists(
                property_granted_select.where(
                    Property.granted == true()
                )
            )
        ).self_group().label("has_property_" + prop)
Code Example #12
 def getNumberFromAlias(self, alias):
     if session.query(exists().where(Recipients.alias == alias)).scalar():
         return session.query(Recipients).filter(Recipients.alias == alias).first().phoneNumber
     elif session.query(exists().where(Recipients.phoneNumber == alias)).scalar():
         return alias
     else:
         raise "Unknown alias in getNumberFromAlias"
Code Example #13
File: livepush.py, Project: baris/pushmanager
 def post(self):
     if not self.current_user:
         return self.send_error(403)
     self.pushid = core.util.get_int_arg(self.request, "id")
     push_query = (
         db.push_pushes.update()
         .where(db.push_pushes.c.id == self.pushid)
         .values({"state": "live", "modified": time.time()})
     )
     request_query = (
         db.push_requests.update()
         .where(
             SA.and_(
                 db.push_requests.c.state == "blessed",
                 SA.exists(
                     [1],
                     SA.and_(
                         db.push_pushcontents.c.push == self.pushid,
                         db.push_pushcontents.c.request == db.push_requests.c.id,
                     ),
                 ),
             )
         )
         .values({"state": "live", "modified": time.time()})
     )
     reset_query = (
         db.push_requests.update()
         .where(
             SA.exists(
                 [1],
                 SA.and_(
                     db.push_requests.c.state == "pickme",
                     db.push_pushcontents.c.push == self.pushid,
                     db.push_pushcontents.c.request == db.push_requests.c.id,
                 ),
             )
         )
         .values({"state": "requested"})
     )
     delete_query = db.push_pushcontents.delete().where(
         SA.exists(
             [1],
             SA.and_(
                 db.push_pushcontents.c.push == self.pushid,
                 db.push_pushcontents.c.request == db.push_requests.c.id,
                 db.push_requests.c.state == "requested",
             ),
         )
     )
     live_query = db.push_requests.select().where(
         SA.and_(
             db.push_requests.c.state == "live",
             db.push_pushcontents.c.push == self.pushid,
             db.push_pushcontents.c.request == db.push_requests.c.id,
         )
     )
     db.execute_transaction_cb(
         [push_query, request_query, reset_query, delete_query, live_query], self.on_db_complete
     )
Code Example #14
File: filter.py, Project: RetailMeNotSandbox/dart
 def expr(self, i, alias, col, key_groups, t, v, op, last_is_array):
     if i < len(key_groups) - 1:
         subq, c = self.get_subquery(alias, i, key_groups)
         subq = subq.where(self.expr(i + 1, 'dart_a_%s.value' % i, c, key_groups, t, v, op, last_is_array))
         return exists(subq)
     if last_is_array:
         subq, c = self.get_subquery(alias, i, key_groups, True)
         subq = subq.where(op.evaluate(lambda x: x, c, _python_cast(t), v))
         return exists(subq)
     return op.evaluate(_pg_cast(t), col[key_groups[i]], _python_cast(t), v)
Code Example #15
File: List.py, Project: jvitus/ecoReleve-Data
    def countQuery(self, criteria=None):
        query = super().countQuery(criteria)

        curEquipmentTable = Base.metadata.tables['CurrentlySensorEquiped']
        MonitoredSiteTable = Base.metadata.tables['MonitoredSite']
        joinTable = outerjoin(
            curEquipmentTable,
            MonitoredSite,
            MonitoredSiteTable.c['ID'] == curEquipmentTable.c['FK_MonitoredSite'])

        for obj in criteria:
            if 'available' in obj['Column']:
                query = self.WhereInJoinTable(query, obj)

            if (obj['Column'] in ['FK_MonitoredSiteName', 'FK_Individual']
                    and obj['Operator'] not in ['is null', 'is not null']):

                queryExist = select(curEquipmentTable.c
                                    ).select_from(joinTable
                                                  ).where(Sensor.ID == curEquipmentTable.c['FK_Sensor'])

                if obj['Column'] == 'FK_MonitoredSiteName':
                    queryExist = queryExist.where(eval_.eval_binary_expr(
                        MonitoredSiteTable.c['Name'],
                        obj['Operator'],
                        obj['Value']))

                if obj['Column'] == 'FK_Individual':
                    queryExist = queryExist.where(eval_.eval_binary_expr(
                        curEquipmentTable.c['FK_Individual'],
                        obj['Operator'],
                        obj['Value']))
                query = query.where(exists(queryExist))

            if (obj['Column'] in ['FK_MonitoredSiteName', 'FK_Individual']
                    and obj['Operator'] in ['is null', 'is not null']):
                queryExist = select(curEquipmentTable.c
                                    ).select_from(joinTable
                                                  ).where(Sensor.ID == curEquipmentTable.c['FK_Sensor'])

                if obj['Column'] == 'FK_Individual':
                    queryExist = queryExist.where(
                        and_(Sensor.ID == curEquipmentTable.c['FK_Sensor'],
                             curEquipmentTable.c['FK_Individual'] != None))

                if obj['Column'] == 'FK_MonitoredSiteName':
                    queryExist = queryExist.where(
                        and_(Sensor.ID == curEquipmentTable.c['FK_Sensor'],
                             curEquipmentTable.c['FK_MonitoredSite'] != None))

                if 'not' in obj['Operator']:
                    query = query.where(exists(queryExist))
                else:
                    query = query.where(not_(exists(queryExist)))
        return query
Code Example #16
File: models.py, Project: closears/payment_manager
 def total_wage_before(cls, last_date):
     Assoc = PersonStandardAssoc
     expression = select(
         [func.sum(Standard.money).label('money')]).where(
             exists().where(and_(
                 Assoc.standard_id == Standard.id,
                 exists().where(and_(
                     Assoc.person_id == cls.id,
                     Assoc.effective_before(last_date)))))).c.money +\
         cls.personal_wage
     return expression.label('total_wage_before')
Code Example #17
File: models.py, Project: eoglethorpe/hfh
def create():
    if not session.query(exists(select([("schema_name")]).select_from("information_schema.schemata")\
                                        .where("schema_name = 'surveys'"))).scalar():
        engine.execute(CreateSchema('surveys'))

    if not session.query(exists(select([("schema_name")]).select_from("information_schema.schemata")\
                                        .where("schema_name = 'raw'"))).scalar():
        engine.execute(CreateSchema('raw'))


    Base.metadata.create_all(engine)
Code Example #18
File: UserListAPI.py, Project: PageLib/ws
    def post(self):
        """Creates a new user"""
        # Parse the arguments.
        parser = reqparse.RequestParser()
        parser.add_argument('login', type=str, location='json', required=True, help='Missing login')
        parser.add_argument('password_hash', type=str, location='json', required=True, help='Missing password')
        parser.add_argument('role', type=str, location='json', required=True, help='Missing role')
        parser.add_argument('entity_id', type=str, location='json', required=True, help='Missing entity_id.')
        parser.add_argument('last_name', type=str, location='json')
        parser.add_argument('first_name', type=str, location='json')
        args = parser.parse_args()

        first_name = args.get('first_name', None)
        last_name = args.get('last_name', None)
        role = args['role']
        entity_id = args['entity_id']
        login = args['login']
        password_hash = args['password_hash']

        ensure_allowed('create', 'user')

        # Check if the role is correct
        if role not in roles:
            app.logger.warning('Request on POST UserListAPI for non existing or missing role')
            return {'error': 'Role POSTed is not allowed'}, 412

        # We check if another non deleted user has the same login
        user_same_login_exists = request.dbs.query(exists().where(and_(User.login == login,
                                                                       not_(User.deleted)))).scalar()
        if user_same_login_exists:
            return {'error': 'User with the same login exists.'}, 412

        # Check if the entity exists
        if not request.dbs.query(exists().where(and_(model.Entity.id == entity_id,
                                                     not_(model.Entity.deleted)))).scalar():
            app.logger.warning('Request on POST UserListAPI with entity_id {} not found'.format(entity_id))
            return {'error': 'entity_id doesn\'t exist'}, 412

        # Write the user in DB.
        user_id = generate_uuid_for(request.dbs, User)
        u = User(
            id=user_id,
            login=login,
            password_hash=hashlib.sha1(password_hash.encode('utf-8')).hexdigest(),
            last_name=last_name,
            first_name=first_name,
            role=role,
            deleted=False,
            entity_id=entity_id
        )
        request.dbs.add(u)
        app.logger.info('User {} (uuid: {}) created'.format(login, user_id))
        return marshal(u.to_dict(), user_fields), 201
Code Example #19
    def countFilterOnDynProp(self, fullQuery, criteria):
        curDynProp = self.GetDynProp(criteria['Column'])
        countHisto = False

        if curDynProp is None:
            return fullQuery

        if self.history:
            countHisto = True
        if 'date' in curDynProp['TypeProp'].lower():
            try:
                criteria['Value'] = parse(criteria['Value'].replace(' ', ''))
            except:
                pass

        filterCriteria = eval_.eval_binary_expr(
            self.GetDynPropValueView(
                countHisto=countHisto).c['Value' + curDynProp['TypeProp']],
                criteria['Operator'],
                criteria['Value'])
        if filterCriteria is not None and 'null' not in criteria['Operator'].lower():
            existQuery = select(
                [self.GetDynPropValueView(countHisto=countHisto)])
            existQuery = existQuery.where(
                and_(
                    self.GetDynPropValueView(countHisto=countHisto).c[
                        'Name'] == criteria['Column'],
                    filterCriteria
                ))
            existQuery = existQuery.where(self.ObjWithDynProp.ID == self.GetDynPropValueView(
                countHisto=countHisto).c[self.ObjWithDynProp().GetSelfFKNameInValueTable()])
            fullQuery = fullQuery.where(exists(existQuery))

        elif 'null' in criteria['Operator'].lower():
            existQuery = select(
                [self.GetDynPropValueView(countHisto=countHisto)])
            existQuery = existQuery.where(
                self.GetDynPropValueView(countHisto=countHisto).c[
                    'Name'] == criteria['Column'],
            )
            existQuery = existQuery.where(self.ObjWithDynProp.ID == self.GetDynPropValueView(
                countHisto=countHisto).c[self.ObjWithDynProp().GetSelfFKNameInValueTable()])

            if 'is null' == criteria['Operator'].lower():
                fullQuery = fullQuery.where(or_(
                    exists(existQuery.where(filterCriteria)),
                    ~exists(existQuery)
                ))
            else:
                fullQuery = fullQuery.where(
                    exists(existQuery.where(filterCriteria)))
        return fullQuery
Code Example #20
def upgrade():
    conn = op.get_bind()

    language = sa.table('language', *map(sa.column, ['pk', 'id', 'name', 'updated']))
    lid = sa.bindparam('id_')
    lbefore = sa.bindparam('before')
    update_lang = sa.update(language, bind=conn)\
        .where(sa.and_(
            language.c.id == lid,
            language.c.name == lbefore))\
        .values(updated=sa.func.now(), name=sa.bindparam('after'))

    walslanguage = sa.table('walslanguage', *map(sa.column, ['pk', 'ascii_name']))
    aname = sa.bindparam('ascii_name')
    update_wals = sa.update(walslanguage, bind=conn)\
        .where(sa.exists().where(sa.and_(
            language.c.pk == walslanguage.c.pk,
            language.c.id == lid))\
        .where(walslanguage.c.ascii_name != aname))\
        .values(ascii_name=aname)

    icols = ['created', 'updated', 'active', 'version', 'type', 'description', 'lang', 'name']
    identifier = sa.table('identifier', *map(sa.column, ['pk'] + icols))
    itype, idesc, ilang = (sa.bindparam(*a) for a in [('type', 'name'), ('description', 'other'), ('lang', 'en')])
    iname = sa.bindparam('name')
    iwhere = sa.and_(
        identifier.c.type == itype,
        identifier.c.description == idesc,
        identifier.c.lang == ilang,
        identifier.c.name == iname)
    insert_ident = sa.insert(identifier, bind=conn).from_select(icols,
        sa.select([sa.func.now(), sa.func.now(), True, 1, itype, idesc, ilang, iname])
        .where(~sa.exists().where(iwhere)))

    licols = ['created', 'updated', 'active', 'version', 'language_pk', 'identifier_pk']
    languageidentifier = sa.table('languageidentifier', *map(sa.column, licols))
    l_pk = sa.select([language.c.pk]).where(language.c.id == lid)
    i_pk = sa.select([identifier.c.pk]).where(sa.and_(iwhere))
    insert_lang_ident = sa.insert(languageidentifier, bind=conn).from_select(licols,
        sa.select([sa.func.now(), sa.func.now(), True, 1, l_pk.as_scalar(), i_pk.as_scalar()])
        .where(~sa.exists().where(sa.and_(
            languageidentifier.c.language_pk == l_pk,
            languageidentifier.c.identifier_pk == i_pk))))

    for id_, (before, after, keep) in sorted(ID_BEFORE_AFTER_KEEP.items()):
        update_lang.execute(id_=id_, before=before, after=after)
        update_wals.execute(id_=id_, ascii_name=ascii_name(after))
        if keep:
            insert_ident.execute(name=before)
            insert_lang_ident.execute(id_=id_, name=before)
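
update_wals above pairs UPDATE with a correlated EXISTS so that only rows backed by a matching language row are rewritten. A runnable reduction of that idiom with hypothetical tables, in the same 1.x-style Core API:

import sqlalchemy as sa

engine = sa.create_engine('sqlite://')
meta = sa.MetaData()
language = sa.Table('language', meta,
                    sa.Column('pk', sa.Integer, primary_key=True),
                    sa.Column('id', sa.String))
wals = sa.Table('walslanguage', meta,
                sa.Column('pk', sa.Integer, primary_key=True),
                sa.Column('ascii_name', sa.String))
meta.create_all(engine)

aname = sa.bindparam('ascii_name')
# UPDATE walslanguage SET ascii_name=? WHERE EXISTS
#   (SELECT * FROM language WHERE language.pk = walslanguage.pk
#    AND language.id = ?) AND walslanguage.ascii_name != ?
update_wals = sa.update(wals)\
    .where(sa.exists().where(sa.and_(
        language.c.pk == wals.c.pk,
        language.c.id == sa.bindparam('id_'))))\
    .where(wals.c.ascii_name != aname)\
    .values(ascii_name=aname)

with engine.begin() as conn:
    conn.execute(language.insert(), {'pk': 1, 'id': 'abc'})
    conn.execute(wals.insert(), {'pk': 1, 'ascii_name': 'old'})
    result = conn.execute(update_wals, {'id_': 'abc', 'ascii_name': 'new'})
    print(result.rowcount)  # 1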
Code Example #21
File: List.py, Project: jvitus/ecoReleve-Data
    def WhereInJoinTable(self, query, criteriaObj):
        query = super().WhereInJoinTable(query, criteriaObj)
        curProp = criteriaObj['Column']
        if 'available' in curProp.lower():
            date = criteriaObj['Value']
            try:
                date = parse(date.replace(' ', ''))
            except:
                pass
            e = aliased(Equipment)
            e2 = aliased(Equipment)
            e3 = aliased(Equipment)

            subQueryEquip = select([e2]).where(
                and_(e.FK_Sensor == e2.FK_Sensor,
                     and_(e.StartDate < e2.StartDate, e2.StartDate <= date)))

            querySensor = select([e]).where(
                and_(e.StartDate <= date,
                     and_(e.Deploy == 0,
                          and_(Sensor.ID == e.FK_Sensor,
                               not_(exists(subQueryEquip)))
                          )
                     ))

            subQueryNotEquip = select([e3]).where(
                and_(Sensor.ID == e3.FK_Sensor,
                     e3.StartDate < date))

            if criteriaObj['Operator'].lower() != 'is not':
                query = query.where(or_(exists(querySensor),
                                        not_(exists(subQueryNotEquip))))
            else:
                query = query.where(or_(not_(exists(querySensor)),
                                        not_(exists(subQueryNotEquip))))

        if 'FK_MonitoredSiteName' == curProp:
            MonitoredSiteTable = Base.metadata.tables['MonitoredSite']
            val = criteriaObj['Value']
            query = query.where(eval_.eval_binary_expr(
                MonitoredSiteTable.c['Name'], criteriaObj['Operator'], val))

        if 'FK_Individual' == curProp:
            curEquipmentTable = Base.metadata.tables['CurrentlySensorEquiped']
            val = criteriaObj['Value']
            query = query.where(eval_.eval_binary_expr(
                curEquipmentTable.c['FK_Individual'],
                criteriaObj['Operator'],
                val))
        return query
Code Example #22
File: hello.py, Project: GunioRobot/scatterbrainz
 def searchShopAJAX(self):
     artist = request.params['artist']
     album = request.params['album']
     mbid = request.params['mbid']
     if not artist and not album and not mbid:
         return simplejson.dumps({'albums':[], 'numlocal':0, 'truncated':False})
     query = Session.query(MBReleaseGroup, MBReleaseName, MBArtistName, MBReleaseGroupMeta, MBReleaseGroupType) \
                    .join(MBReleaseName) \
                    .join(MBReleaseGroup.artistcredit, MBArtistCredit.name) \
                    .outerjoin(MBReleaseGroupMeta) \
                    .outerjoin(MBReleaseGroupType) \
                    .filter(~exists().where(MBReleaseGroup.gid==Album.mbid)) \
                    .filter(~exists().where(MBReleaseGroup.gid==ShopDownload.release_group_mbid))
     limit = 30
     if mbid:
         query = query.filter(MBReleaseGroup.gid==mbid)
     else:
         if album:
             query = query.filter("to_tsvector('mb_simple', release_name.name) " + \
                           "@@ plainto_tsquery('mb_simple', :album)") \
                          .params(album=album)
         if artist:
             query = query.filter("to_tsvector('mb_simple', artist_name.name) " + \
                           "@@ plainto_tsquery('mb_simple', :artist)") \
                          .params(artist=artist)
     results = query.limit(limit).all()
     resultmbids = set()
     for (album, albumname, artistname, rgmeta, rgtype) in results:
         resultmbids.add(album.gid)
     albums = []
     for (album, albumname, artistname, rgmeta, rgtype) in results:
         if rgmeta and rgmeta.year:
             year = rgmeta.year
         else:
             year = '?'
         if rgtype and rgtype.name:
             rtype = rgtype.name
         else:
             rtype = 'Unknown'
         albums.append({
             'mbid'   : album.gid,
             'album'  : albumname.name,
             'artist' : artistname.name,
             'year'   : year,
             'type'   : rtype
         })
     truncated = len(results) == limit
     return simplejson.dumps({'albums':albums, 'truncated':truncated})
Code Example #23
File: test_sqlalchemy.py, Project: thekingofhero/ibis
    def test_exists(self):
        e1, e2 = self._case_exists()

        t1 = self._to_sqla(self.t1).alias("t0")
        t2 = self._to_sqla(self.t2).alias("t1")

        cond1 = sa.exists([L(1)]).where(t1.c.key1 == t2.c.key1)
        ex1 = sa.select([t1]).where(cond1)

        cond2 = sa.exists([L(1)]).where(sql.and_(t1.c.key1 == t2.c.key1, t2.c.key2 == L("foo")))
        ex2 = sa.select([t1]).where(cond2)

        # pytest.skip('not yet implemented')

        self._compare_sqla(e1, ex1)
        self._compare_sqla(e2, ex2)
Code Example #24
File: database.py, Project: will-hart/blitz
    def update_session_list(self, sessions_list):
        """
        Session list comes in [session_id, start_timestamp, end_timestamp] format
        This replaces the existing session list

        :param sessions_list: a list of lists of session information [id, timeStarted, timeStopped, numberOfReadings]
        :returns: nothing
        """
        self.logger.debug("Updating session list")

        sess = self._session()
        sess.query(Session).delete()
        sess.commit()

        sessions = []

        for session in sessions_list:

            count = self._session().query(sql.exists().where(Reading.sessionId == session[0])).scalar()
            blitz_session = Session()
            blitz_session.ref_id = session[0]
            blitz_session.timeStarted = session[1]
            blitz_session.timeStopped = session[2]
            blitz_session.numberOfReadings = session[3]
            blitz_session.available = count > 0
            sessions.append(blitz_session)

        self.add_many(sessions)
Code Example #25
File: alchemy.py, Project: obswork/ibis
    def _add_select(self, table_set):
        to_select = []
        for expr in self.select_set:
            if isinstance(expr, ir.ValueExpr):
                arg = self._translate(expr, named=True)
            elif isinstance(expr, ir.TableExpr):
                if expr.equals(self.table_set):
                    cached_table = self.context.get_table(expr)
                    if cached_table is None:
                        # the select * case from materialized join
                        arg = '*'
                    else:
                        arg = table_set
                else:
                    arg = self.context.get_table(expr)
                    if arg is None:
                        raise ValueError(expr)

            to_select.append(arg)

        if self.exists:
            clause = sa.exists(to_select)
        else:
            clause = sa.select(to_select)

        if self.distinct:
            clause = clause.distinct()

        if table_set is not None:
            return clause.select_from(table_set)
        else:
            return clause
Code Example #26
def adds_yt_video_info_to_db(contains_yt_playlist_info):
    """Adds unique videos to youtube_videos table"""
    
    # parameter from creates_yt_video_playlist(yt_playlist_query):
    # in youtube.py

    print('SEED.PY, adds_yt_video_info_to_db, contains_yt_playlist_info ', contains_yt_playlist_info)

    for video_item in contains_yt_playlist_info:
        yt_video_id = video_item['yt_video_id']
        video_title = video_item['video_title']
        video_thumbnail = video_item['video_thumbnail']
        searched_artist = video_item['searched_artist']
        searched_song = video_item['searched_song']
        artist_id = video_item['artist_id']

        does_video_exist = db.session.query(exists().where(YouTubeVideo.yt_video_id == yt_video_id)).scalar()

        if does_video_exist:
            print "Video in db"
        else:
            print "Video doesn't exist. Ading to db"
            video_info = YouTubeVideo(yt_video_id=yt_video_id,
                                      video_title=video_title,
                                      video_thumbnail=video_thumbnail,
                                      searched_artist=searched_artist,
                                      searched_song=searched_song,
                                      artist_id=artist_id)

            db.session.add(video_info)
            db.session.flush()
        print "youtube, adds_yt_song_results_to_db, Video and artist_id successfully flushed to database."
Code Example #27
    def test_a_atobalias_balias_c_w_exists(self):
        a_to_b_alias = a_to_b.alias()
        b_alias = b.alias()

        j1 = a_to_b_alias.join(b_alias)
        j2 = a.outerjoin(j1, a.c.id == a_to_b_alias.c.a_id)

        # TODO: if we put straight a_to_b_alias here,
        # it fails to alias the columns clause.
        s = select(
            [
                a,
                a_to_b_alias.c.a_id,
                a_to_b_alias.c.b_id,
                b_alias.c.id,
                b_alias.c.a_id,
                exists()
                .select_from(c)
                .where(c.c.b_id == b_alias.c.id)
                .label(None),
            ],
            use_labels=True,
        ).select_from(j2)

        self._test(s, self._a_atobalias_balias_c_w_exists)
Code Example #28
File: needpropertyxml.py, Project: ustbgaofan/beaker
 def filter(self, joins):
     op = self.op_table[self.get_xml_attr('op', unicode, '==')]
     value = self.get_xml_attr('value', unicode, None)
     if not value:
         return (joins, None)
     if op == '__eq__':
         query = exists([1],
                 from_obj=[LabControllerDistroTree.__table__.join(LabController.__table__)])\
                 .where(LabControllerDistroTree.distro_tree_id == DistroTree.id)\
                 .where(LabController.fqdn == value)
     else:
         query = not_(exists([1],
                 from_obj=[LabControllerDistroTree.__table__.join(LabController.__table__)])\
                 .where(LabControllerDistroTree.distro_tree_id == DistroTree.id)\
                 .where(LabController.fqdn == value))
     return (joins, query)
Code Example #29
def fetch_submissions(hdlr_name=None):
    """
    :param hdlr_name: Name of the handler that requests submissions.
    :return:
        Return submissions with 'pid' greater than the milestones added by the
        handler with 'handler_name' and sorted by 'pid', which is equivalent to
        sorted by 'submit_time'.
        If hdlr_name is not specified or there are no milestones under
        the name of a handler, all submissions are returned.
        An empty list is returned if there are no available submissions.
    :rtype: [Submission]
    :caller: Handler
    """
    mlst = select([t_milestone.c.submission_pid, t_milestone.c.handler_name]) \
        .where(t_milestone.c.handler_name == hdlr_name) \
        .order_by(t_milestone.c.submission_pid.desc()).limit(1)
    mlst = union(select([mlst]), select([None, None]).where(~exists(mlst)))
    s = select([t.c.oj,
                t.c.problem_id,
                t.c.problem_title,
                t.c.problem_url,
                t.c.submit_time,
                t.c.timezone,
                t.c.pid]) \
        .where((mlst.c.submission_pid == None) |
               (t.c.pid > mlst.c.submission_pid) & (mlst.c.handler_name == hdlr_name)) \
        .order_by(t.c.pid)
    with engine.connect() as conn:
        return [Submission(*d) for d in conn.execute(s)]
Code Example #30
def test_email_check():
    session, db = session_init()
    if session.query(exists().where(Emails.Email == 'sigmail.com')).scalar():
        print("It's here!")
    else:
        print('Nah')
Code Example #31
File: bibfiles_db.py, Project: zogamnuam/glottolog
 def onetoone(cls, bind):
     other = sa.orm.aliased(cls)
     return sa.select([~sa.exists().select_from(cls).where(sa.exists().where(sa.or_(
         sa.and_(other.hash == cls.hash, other.id != cls.id),
         sa.and_(other.id == cls.hash, other.hash != cls.hash))))], bind=bind).scalar()
Code Example #32
File: ub.py, Project: ThunderstormsZJ/calibre-web
def migrate_Database(_session):
    engine = _session.bind
    add_missing_tables(engine, _session)
    migrate_registration_table(engine, _session)
    migrate_readBook(engine, _session)
    migrate_remoteAuthToken(engine, _session)
    migrate_shelfs(engine, _session)
    try:
        create = False
        _session.query(exists().where(User.sidebar_view)).scalar()
    except exc.OperationalError:  # Database is not compatible, some columns are missing
        with engine.connect() as conn:
            conn.execute(
                "ALTER TABLE user ADD column `sidebar_view` Integer DEFAULT 1")
        _session.commit()
        create = True
    try:
        if create:
            with engine.connect() as conn:
                conn.execute("SELECT language_books FROM user")
            _session.commit()
    except exc.OperationalError:
        with engine.connect() as conn:
            conn.execute(
                "UPDATE user SET 'sidebar_view' = (random_books* :side_random + language_books * :side_lang "
                "+ series_books * :side_series + category_books * :side_category + hot_books * "
                ":side_hot + :side_autor + :detail_random)", {
                    'side_random': constants.SIDEBAR_RANDOM,
                    'side_lang': constants.SIDEBAR_LANGUAGE,
                    'side_series': constants.SIDEBAR_SERIES,
                    'side_category': constants.SIDEBAR_CATEGORY,
                    'side_hot': constants.SIDEBAR_HOT,
                    'side_autor': constants.SIDEBAR_AUTHOR,
                    'detail_random': constants.DETAIL_RANDOM
                })
        _session.commit()
    try:
        _session.query(exists().where(User.denied_tags)).scalar()
    except exc.OperationalError:  # Database is not compatible, some columns are missing
        with engine.connect() as conn:
            conn.execute(
                "ALTER TABLE user ADD column `denied_tags` String DEFAULT ''")
            conn.execute(
                "ALTER TABLE user ADD column `allowed_tags` String DEFAULT ''")
            conn.execute(
                "ALTER TABLE user ADD column `denied_column_value` String DEFAULT ''"
            )
            conn.execute(
                "ALTER TABLE user ADD column `allowed_column_value` String DEFAULT ''"
            )
        _session.commit()
    try:
        _session.query(exists().where(User.view_settings)).scalar()
    except exc.OperationalError:
        with engine.connect() as conn:
            conn.execute(
                "ALTER TABLE user ADD column `view_settings` VARCHAR(10) DEFAULT '{}'"
            )
        _session.commit()
    try:
        _session.query(exists().where(User.kobo_only_shelves_sync)).scalar()
    except exc.OperationalError:
        with engine.connect() as conn:
            conn.execute(
                "ALTER TABLE user ADD column `kobo_only_shelves_sync` SMALLINT DEFAULT 0"
            )
        _session.commit()

    try:
        # check if name is in User table instead of nickname
        _session.query(exists().where(User.name)).scalar()
    except exc.OperationalError:
        # Create new table user_id and copy contents of table user into it
        with engine.connect() as conn:
            conn.execute(
                text(
                    "CREATE TABLE user_id (id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,"
                    "name VARCHAR(64),"
                    "email VARCHAR(120),"
                    "role SMALLINT,"
                    "password VARCHAR,"
                    "kindle_mail VARCHAR(120),"
                    "locale VARCHAR(2),"
                    "sidebar_view INTEGER,"
                    "default_language VARCHAR(3),"
                    "denied_tags VARCHAR,"
                    "allowed_tags VARCHAR,"
                    "denied_column_value VARCHAR,"
                    "allowed_column_value VARCHAR,"
                    "view_settings JSON,"
                    "kobo_only_shelves_sync SMALLINT,"
                    "UNIQUE (name),"
                    "UNIQUE (email))"))
            conn.execute(
                text(
                    "INSERT INTO user_id(id, name, email, role, password, kindle_mail,locale,"
                    "sidebar_view, default_language, denied_tags, allowed_tags, denied_column_value, "
                    "allowed_column_value, view_settings, kobo_only_shelves_sync)"
                    "SELECT id, nickname, email, role, password, kindle_mail, locale,"
                    "sidebar_view, default_language, denied_tags, allowed_tags, denied_column_value, "
                    "allowed_column_value, view_settings, kobo_only_shelves_sync FROM user"
                ))
            # delete old user table and rename new user_id table to user:
            conn.execute(text("DROP TABLE user"))
            conn.execute(text("ALTER TABLE user_id RENAME TO user"))
        _session.commit()
    if _session.query(User).filter(User.role.op('&')(constants.ROLE_ANONYMOUS) == constants.ROLE_ANONYMOUS).first() \
       is None:
        create_anonymous_user(_session)

    migrate_guest_password(engine)
Code Example #33
def insert_multiple(objects_: List[Dict[str, Any]],
                    session: scoped_session,
                    id_: Optional[str] = "") -> List[int]:
    """
    Adds a list of object with given ids to the database
    :param objects_: List of dict's to be added to the database
    :param session: scoped session from getSession in utils
    :param id_: optional parameter containing the ids of objects that have to be inserted
    :return: Ids that have been inserted
    """
    # instance list to store instances
    instance_list = list()
    triples_list = list()
    properties_list = list()
    instances = list()
    id_list = id_.split(',')
    instance_id_list = list()

    # the number of objects would be the same as number of instances
    for index in range(len(objects_)):
        try:
            rdf_class = session.query(RDFClass).filter(
                RDFClass.name == objects_[index]["@type"]).one()
        except NoResultFound:
            raise ClassNotFound(type_=objects_[index]["@type"])
        if index in range(len(id_list)) and id_list[index] != "":
            if session.query(
                    exists().where(
                        Instance.id == id_list[index])).scalar():
                print(session.query(
                    exists().where(
                        Instance.id == id_list[index])))
                # TODO: handle the case where the instance already exists; if
                # the instance is fetched later anyway, remove this
                raise InstanceExists(type_=rdf_class.name, id_=id_list[index])
            else:
                instance = Instance(id=id_list[index], type_=rdf_class.id)
                instances.append(instance)
        else:
            instance = Instance(type_=rdf_class.id)
            instances.append(instance)

    session.add_all(instances)
    session.flush()
    for i in range(len(instances)):
        instance_id_list.append(instances[i].id)

    for index in range(len(objects_)):
        for prop_name in objects_[index]:
            if prop_name not in ["@type", "@context"]:
                try:
                    property_ = session.query(properties).filter(
                        properties.name == prop_name).one()
                except NoResultFound:
                    # Adds new Property
                    session.close()
                    raise PropertyNotFound(type_=prop_name)

                # For insertion in III
                if isinstance(objects_[index][prop_name], dict):
                    instance_id = insert(
                        objects_[index][prop_name], session=session)
                    instance_object = session.query(Instance).filter(
                        Instance.id == instance_id).one()

                    if property_.type_ == "PROPERTY" or property_.type_ == "INSTANCE":
                        property_.type_ = "INSTANCE"
                        properties_list.append(property_)
                        triple = GraphIII(
                            subject=instances[index].id,
                            predicate=property_.id,
                            object_=instance_object.id)
                        triples_list.append(triple)
                    else:
                        session.close()
                        raise NotInstanceProperty(type_=prop_name)

                # For insertion in IAC
                elif session.query(
                        exists().where(RDFClass.name == str(objects_[index][prop_name]))).scalar():
                    if property_.type_ == "PROPERTY" or property_.type_ == "ABSTRACT":
                        property_.type_ = "ABSTRACT"
                        properties_list.append(property_)
                        class_ = session.query(RDFClass).filter(
                            RDFClass.name == objects_[index][prop_name]).one()
                        triple = GraphIAC(
                            subject=instances[index].id,
                            predicate=property_.id,
                            object_=class_.id)
                        triples_list.append(triple)

                    else:
                        session.close()
                        raise NotAbstractProperty(type_=prop_name)

                # For insertion in IIT
                else:
                    terminal = Terminal(value=objects_[index][prop_name])
                    session.add(terminal)
                    session.flush()  # Assigns ID without committing

                    if property_.type_ == "PROPERTY" or property_.type_ == "INSTANCE":
                        property_.type_ = "INSTANCE"
                        properties_list.append(property_)
                        triple = GraphIIT(
                            subject=instances[index].id,
                            predicate=property_.id,
                            object_=terminal.id)
                        # Add things directly to session, if anything fails
                        # whole transaction is aborted
                        triples_list.append(triple)
                    else:
                        session.close()
                        raise NotInstanceProperty(type_=prop_name)
    session.bulk_save_objects(properties_list)
    session.bulk_save_objects(triples_list)
    session.commit()
    return instance_id_list
Code Example #34
def user_exists(db_session, email):
    return db_session.query(exists().where(User.email == email)).scalar()
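
This one-liner is the distilled form of the existence-check pattern that most examples on this page build on. A self-contained version, assuming SQLAlchemy 1.4 and a hypothetical User model; the query renders SELECT EXISTS (SELECT * FROM users WHERE ...) and .scalar() collapses the result to a Python bool:

import sqlalchemy as sa
from sqlalchemy.orm import declarative_base, Session

Base = declarative_base()

class User(Base):
    __tablename__ = 'users'
    id = sa.Column(sa.Integer, primary_key=True)
    email = sa.Column(sa.String)

engine = sa.create_engine('sqlite://')
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add(User(email='a@example.com'))
    session.commit()
    print(session.query(sa.exists().where(User.email == 'a@example.com')).scalar())  # True
    print(session.query(sa.exists().where(User.email == 'missing')).scalar())        # False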
Code Example #35
File: products.py, Project: wickieonya/shopyo
def check(barcode):
    has_product = db.session.query(exists().where(
        Products.barcode == barcode)).scalar()
    return jsonify({"exists": has_product})
Code Example #36
File: cache_manager.py, Project: sailfish009/pybel
 def has_name_version(self, name: str, version: str) -> bool:
     """Check if there exists a network with the name/version combination in the database."""
     return self.session.query(exists().where(
         and_(Network.name == name, Network.version == version))).scalar()
Code Example #37
 def exists(self, id: int) -> bool:
     return self._database.query(exists().where(Tweet.id == id)).scalar()
Code Example #38
 def name_exists(cls, name):
     session = Database().Session
     return session.query(exists().where(cls.name == name)).scalar()
Code Example #39
File: api.py, Project: yinianfangxia/zvt-1
def data_exist(session, schema, id):
    return session.query(exists().where(and_(schema.id == id))).scalar()
Code Example #40
 def has_dept_role(cls):
     return exists().select_from(dept_membership_dept_role) \
         .where(cls.id == dept_membership_dept_role.c.dept_membership_id)
Code Example #41
 def restricted(cls):
     return exists([job_required_role.c.dept_role_id]) \
         .where(job_required_role.c.job_id == cls.id).label('restricted')
Code Example #42
    def insert_mrirow(self,
                      scan,
                      hasT1,
                      hasT2,
                      hasBOLD,
                      hasDWI,
                      mrifolder=rootconfig.dms.folder_mridata):
        """Insert one mriscan record."""
        # check if scan already exists
        try:
            ret = self.session.query(
                exists().where(tables.MRIScan.filename == scan)).scalar()
            if ret:
                # record exists
                return 0
        except MultipleResultsFound:
            print('Error when importing: multiple scan records found for %s' %
                  scan)
            return 1

        # check MRIMachine
        scan_info = loadsave.load_json(
            os.path.join(mrifolder, scan, 'scan_info.json'))
        ret = self.session.query(exists().where(
            and_(
                tables.MRIMachine.institution == scan_info['Machine']
                ['Institution'], tables.MRIMachine.manufacturer ==
                scan_info['Machine']['Manufacturer'],
                tables.MRIMachine.modelname == scan_info['Machine']
                ['ManufacturerModelName']))).scalar()
        if ret:
            machine = self.session.query(tables.MRIMachine).filter(
                and_(
                    tables.MRIMachine.institution == scan_info['Machine']
                    ['Institution'], tables.MRIMachine.manufacturer ==
                    scan_info['Machine']['Manufacturer'],
                    tables.MRIMachine.modelname == scan_info['Machine']
                    ['ManufacturerModelName'])).one()
        else:
            # insert new MRIMachine
            machine = tables.MRIMachine(
                institution=scan_info['Machine']['Institution'],
                manufacturer=scan_info['Machine']['Manufacturer'],
                modelname=scan_info['Machine']['ManufacturerModelName'])

        # check Person
        name = scan_info['Patient']['Name']
        try:
            dateobj = datetime.datetime.strptime(scan_info['StudyDate'],
                                                 '%Y-%m-%d %H:%M:%S')
        except ValueError:
            dateobj = None
        db_mriscan = tables.MRIScan(date=dateobj,
                                    hasT1=hasT1,
                                    hasT2=hasT2,
                                    hasBOLD=hasBOLD,
                                    hasDWI=hasDWI,
                                    filename=scan)
        machine.mriscans.append(db_mriscan)
        try:
            ret = self.session.query(exists().where(
                and_(tables.Person.name == name, tables.Person.patientid ==
                     scan_info['Patient']['ID']))).scalar()
            if ret:
                person = self.session.query(tables.Person).filter(
                    and_(tables.Person.name == name, tables.Person.patientid ==
                         scan_info['Patient']['ID'])).one()
                person.mriscans.append(db_mriscan)
                self.session.add(db_mriscan)
                self.session.commit()
                print('Old patient new scan %s inserted' % scan)
                return 0
        except MultipleResultsFound:
            print(
                'Error when importing: multiple person records found for %s' %
                name)
            return 2
        db_person = tables.Person.build_person(name, scan_info)
        db_person.mriscans.append(db_mriscan)
        self.session.add(db_person)
        self.session.commit()
        print('New patient new scan %s inserted' % scan)
        return 0
Code Example #43
async def load_session(include_url=True, include_header=True):
    """
	Get the login session information from the cookies.

	Includes all the information needed by the master.html template.
	"""
    user_id = flask.session.get('id')
    user_name = flask.session.get('user')
    if user_id is None and user_name is not None:
        # Upgrade old session
        with server.db.engine.begin() as conn:
            user_id, = conn.execute(
                users.insert(postgresql_on_conflict="update").values(
                    id=sqlalchemy.bindparam("_id")).returning(users.c.id),
                twitch.get_user(user_name)).first()
        flask.session["id"] = user_id
    if 'user' in flask.session:
        del flask.session["user"]
    if 'apipass' in flask.request.values and flask.request.values[
            'apipass'] in from_apipass:
        user_id = from_apipass[flask.request.values["apipass"]]

    session = {}
    if include_url:
        session['url'] = flask.request.url
    else:
        session['url'] = None
    if include_header:
        session['header'] = await common.rpc.bot.get_header_info()
        if 'current_game' in session['header']:
            games = server.db.metadata.tables["games"]
            shows = server.db.metadata.tables["shows"]
            stats = server.db.metadata.tables["stats"]
            game_per_show_data = server.db.metadata.tables[
                "game_per_show_data"]
            game_stats = server.db.metadata.tables["game_stats"]
            game_votes = server.db.metadata.tables["game_votes"]
            disabled_stats = server.db.metadata.tables["disabled_stats"]
            with server.db.engine.begin() as conn:
                game_id = session['header']['current_game']['id']
                show_id = session['header']['current_show']['id']
                session['header']['current_game']['display'], = conn.execute(
                    sqlalchemy.select([
                        sqlalchemy.func.coalesce(
                            game_per_show_data.c.display_name, games.c.name),
                    ]).select_from(
                        games.outerjoin(
                            game_per_show_data,
                            (game_per_show_data.c.game_id == games.c.id) &
                            (game_per_show_data.c.show_id == show_id))).where(
                                games.c.id == game_id)).first()

                session['header']['current_show']['name'], = conn.execute(
                    sqlalchemy.select([
                        shows.c.name,
                    ]).where(shows.c.id == show_id)).first()

                good = sqlalchemy.cast(
                    sqlalchemy.func.sum(
                        sqlalchemy.cast(game_votes.c.vote,
                                        sqlalchemy.Integer)),
                    sqlalchemy.Numeric)
                rating = conn.execute(
                    sqlalchemy.select([
                        (100 * good /
                         sqlalchemy.func.count(game_votes.c.vote)),
                        good,
                        sqlalchemy.func.count(game_votes.c.vote),
                    ]).where(game_votes.c.game_id == game_id).where(
                        game_votes.c.show_id == show_id)).first()
                if rating[0] is not None and rating[1] is not None:
                    session['header']['current_game']["rating"] = {
                        'perc': rating[0],
                        'good': rating[1],
                        'total': rating[2],
                    }
                stats_query = sqlalchemy.select([
                    game_stats.c.count,
                    game_data.stat_plural(stats, game_stats.c.count),
                ]).select_from(
                    game_stats.join(stats, stats.c.id == game_stats.c.stat_id)
                ).where(game_stats.c.game_id == game_id) \
                    .where(game_stats.c.show_id == show_id) \
                    .where(~sqlalchemy.exists(sqlalchemy.select([1])
                        .where(disabled_stats.c.stat_id == game_stats.c.stat_id)
                        .where(disabled_stats.c.show_id == game_stats.c.show_id))) \
                    .order_by(game_stats.c.count.desc())
                session['header']['current_game']['stats'] = [{
                    'count': count,
                    'type': type,
                } for count, type in conn.execute(stats_query)]

    if user_id is not None:
        users = server.db.metadata.tables["users"]
        patreon_users = server.db.metadata.tables["patreon_users"]
        with server.db.engine.begin() as conn:
            query = sqlalchemy.select([
                users.c.name,
                sqlalchemy.func.coalesce(users.c.display_name, users.c.name),
                users.c.twitch_oauth, users.c.is_sub, users.c.is_mod,
                users.c.autostatus, users.c.patreon_user
            ]).where(users.c.id == user_id)
            name, display_name, token, is_sub, is_mod, autostatus, patreon_user = conn.execute(
                query).first()
            session['user'] = {
                "id": user_id,
                "name": name,
                "display_name": display_name,
                "twitch_oauth": token,
                "is_sub": is_sub,
                "is_mod": is_mod,
                "autostatus": autostatus,
                "patreon_user": patreon_user,
            }
    else:
        session['user'] = {
            "id": None,
            "name": None,
            "display_name": None,
            "twitch_oauth": None,
            "is_sub": False,
            "is_mod": False,
            "autostatus": False,
        }
    return session
Code example #44
File: bibfiles_db.py Project: zogamnuam/glottolog
    @classmethod
    def allid(cls, bind):
        return sa.select([~sa.exists().where(cls.id == sa.null())],
                         bind=bind).scalar()
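
A minimal standalone sketch of the SQL this helper emits (the toy entry table is hypothetical): NOT EXISTS over rows with a NULL id is true exactly when every row has an id.

import sqlalchemy as sa

entry = sa.table('entry', sa.column('id'))

stmt = sa.select([~sa.exists().where(entry.c.id == sa.null())])
print(stmt)
# roughly: SELECT NOT (EXISTS (SELECT * FROM entry WHERE entry.id IS NULL))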
Code example #45
    def get_events(self, event_filter, limit=None):
        """Return an iterable of model.Event objects.

        :param event_filter: EventFilter instance
        """
        if limit == 0:
            return
        session = self._engine_facade.get_session()
        with session.begin():
            # Build up the join conditions
            event_join_conditions = [
                models.EventType.id == models.Event.event_type_id
            ]

            if event_filter.event_type:
                event_join_conditions.append(
                    models.EventType.desc == event_filter.event_type)

            # Build up the where conditions
            event_filter_conditions = []
            if event_filter.message_id:
                event_filter_conditions.append(
                    models.Event.message_id == event_filter.message_id)
            if event_filter.start_timestamp:
                event_filter_conditions.append(
                    models.Event.generated >= event_filter.start_timestamp)
            if event_filter.end_timestamp:
                event_filter_conditions.append(
                    models.Event.generated <= event_filter.end_timestamp)

            trait_subq = None
            # Build trait filter
            if event_filter.traits_filter:
                filters = list(event_filter.traits_filter)
                trait_filter = filters.pop()
                key = trait_filter.pop('key')
                op = trait_filter.pop('op', 'eq')
                trait_type, value = list(trait_filter.items())[0]
                trait_subq, t_model = _build_trait_query(
                    session, trait_type, key, value, op)
                for trait_filter in filters:
                    key = trait_filter.pop('key')
                    op = trait_filter.pop('op', 'eq')
                    trait_type, value = list(trait_filter.items())[0]
                    q, model = _build_trait_query(session, trait_type, key,
                                                  value, op)
                    trait_subq = trait_subq.filter(
                        q.filter(model.event_id == t_model.event_id).exists())
                trait_subq = trait_subq.subquery()

            query = (session.query(models.Event.id).join(
                models.EventType, sa.and_(*event_join_conditions)))
            if trait_subq is not None:
                query = query.join(trait_subq,
                                   trait_subq.c.ev_id == models.Event.id)
            if event_filter_conditions:
                query = query.filter(sa.and_(*event_filter_conditions))

            query = query.order_by(models.Event.generated).limit(limit)
            event_list = {}
            # get a list of all events that match filters
            for (id_, generated, message_id, desc, raw) in query.add_columns(
                    models.Event.generated, models.Event.message_id,
                    models.EventType.desc, models.Event.raw).all():
                event_list[id_] = api_models.Event(message_id, desc, generated,
                                                   [], raw)
            # Query all traits related to events.
            # NOTE (gordc): cast is done because pgsql defaults to TEXT when
            #               handling unknown values such as null.
            trait_q = (session.query(
                models.TraitDatetime.event_id, models.TraitDatetime.key,
                models.TraitDatetime.value, sa.cast(sa.null(), sa.Integer),
                sa.cast(sa.null(), sa.Float(53)),
                sa.cast(sa.null(), sa.String(255))).filter(sa.exists().where(
                    models.TraitDatetime.event_id == query.subquery().c.id))
                       ).union_all(
                           session.query(models.TraitInt.event_id,
                                         models.TraitInt.key, sa.null(),
                                         models.TraitInt.value, sa.null(),
                                         sa.null()).filter(sa.exists().where(
                                             models.TraitInt.event_id ==
                                             query.subquery().c.id)),
                           session.query(models.TraitFloat.event_id,
                                         models.TraitFloat.key, sa.null(),
                                         sa.null(), models.TraitFloat.value,
                                         sa.null()).filter(sa.exists().where(
                                             models.TraitFloat.event_id ==
                                             query.subquery().c.id)),
                           session.query(models.TraitText.event_id,
                                         models.TraitText.key, sa.null(),
                                         sa.null(), sa.null(),
                                         models.TraitText.value).filter(
                                             sa.exists().where(
                                                 models.TraitText.event_id ==
                                                 query.subquery().c.id)))

            for id_, key, t_date, t_int, t_float, t_text in (trait_q.order_by(
                    models.TraitDatetime.key)).all():
                if t_int is not None:
                    dtype = api_models.Trait.INT_TYPE
                    val = t_int
                elif t_float is not None:
                    dtype = api_models.Trait.FLOAT_TYPE
                    val = t_float
                elif t_date is not None:
                    dtype = api_models.Trait.DATETIME_TYPE
                    val = t_date
                else:
                    dtype = api_models.Trait.TEXT_TYPE
                    val = t_text

                trait_model = api_models.Trait(key, dtype, val)
                event_list[id_].append_trait(trait_model)

            return event_list.values()
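
A hedged usage sketch (assuming conn is the storage connection exposing this method, that EventFilter accepts these fields as keyword arguments, and that Event exposes message_id/generated attributes):

import datetime

event_filter = EventFilter(
    event_type='compute.instance.create.end',
    start_timestamp=datetime.datetime(2020, 1, 1))
for event in conn.get_events(event_filter, limit=100):
    print(event.message_id, event.generated)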
Code example #46
File: volatile_replica.py Project: ijjorama/rucio
def add_volatile_replicas(rse_id, replicas, session=None):
    """
    Bulk add volatile replicas.

    :param rse_id: the rse id.
    :param replicas: the list of volatile replicas.
    :param session: The database session in use.
    :returns: True if successful.
    """
    # first check that the rse is a volatile one
    try:
        session.query(models.RSE.id).filter_by(rse_id=rse_id,
                                               volatile=True).one()
    except NoResultFound:
        raise exception.UnsupportedOperation(
            'No volatile rse found for %s !' %
            get_rse_name(rse_id=rse_id, session=session))

    file_clause, replica_clause = [], []
    for replica in replicas:
        file_clause.append(
            and_(
                models.DataIdentifier.scope == replica['scope'],
                models.DataIdentifier.name == replica['name'], ~exists(
                    select([1]).prefix_with(
                        "/*+ INDEX(REPLICAS REPLICAS_PK) */", dialect='oracle')
                ).where(
                    and_(models.RSEFileAssociation.scope == replica['scope'],
                         models.RSEFileAssociation.name == replica['name'],
                         models.RSEFileAssociation.rse_id == rse_id))))
        replica_clause.append(
            and_(models.RSEFileAssociation.scope == replica['scope'],
                 models.RSEFileAssociation.name == replica['name'],
                 models.RSEFileAssociation.rse_id == rse_id))

    if replica_clause:
        now = datetime.utcnow()
        session.query(models.RSEFileAssociation).\
            with_hint(models.RSEFileAssociation, "index(REPLICAS REPLICAS_PK)", 'oracle').\
            filter(or_(*replica_clause)).\
            update({'updated_at': now, 'tombstone': now}, synchronize_session=False)

    if file_clause:
        file_query = session.query(models.DataIdentifier.scope,
                                   models.DataIdentifier.name,
                                   models.DataIdentifier.bytes,
                                   models.DataIdentifier.md5,
                                   models.DataIdentifier.adler32).\
            filter(or_(*file_clause))

        session.bulk_insert_mappings(
            models.RSEFileAssociation, [{
                'rse_id': rse_id,
                'adler32': adler32,
                'state': ReplicaState.AVAILABLE,
                'scope': scope,
                'name': name,
                'lock_cnt': 0,
                'tombstone': datetime.utcnow(),
                'bytes': bytes,
                'md5': md5
            } for scope, name, bytes, md5, adler32 in file_query])
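
A hedged usage sketch (the rse_id, scope and name values are hypothetical); each replica dict only needs the 'scope' and 'name' keys read above:

replicas = [{'scope': 'user.jdoe', 'name': 'file_000'},
            {'scope': 'user.jdoe', 'name': 'file_001'}]
add_volatile_replicas(rse_id=some_rse_id, replicas=replicas, session=session)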
Code example #47
    def exists(self):
        return session.query(exists().where(
            User.user_id == self.user_id)).scalar()
Code example #48
def file_upload(request):
    # If we're in read-only mode, let upload clients know
    if request.flags.enabled("read-only"):
        raise _exc_with_message(
            HTTPForbidden, "Read-only mode: Uploads are temporarily disabled")

    # Before we do anything, if there isn't an authenticated user with this
    # request, then we'll go ahead and bomb out.
    if request.authenticated_userid is None:
        raise _exc_with_message(
            HTTPForbidden,
            "Invalid or non-existent authentication information.")

    # Ensure that user has a verified, primary email address. This should both
    # reduce the ease of spam account creation and activity, as well as act as
    # a forcing function for https://github.com/pypa/warehouse/issues/3632.
    # TODO: Once https://github.com/pypa/warehouse/issues/3632 has been solved,
    #       we might consider a different condition, possibly looking at
    #       User.is_active instead.
    if not (request.user.primary_email
            and request.user.primary_email.verified):
        raise _exc_with_message(
            HTTPBadRequest,
            ("User {!r} does not have a verified primary email address. "
             "Please add a verified primary email before attempting to "
             "upload to PyPI. See {project_help} for more information."
             "for more information.").format(
                 request.user.username,
                 project_help=request.help_url(_anchor="verified-email"),
             ),
        ) from None

    # Do some cleanup of the various form fields
    for key in list(request.POST):
        value = request.POST.get(key)
        if isinstance(value, str):
            # distutils "helpfully" substitutes unknown, but "required" values
            # with the string "UNKNOWN". This is basically never what anyone
            # actually wants so we'll just go ahead and delete anything whose
            # value is UNKNOWN.
            if value.strip() == "UNKNOWN":
                del request.POST[key]

            # Escape NUL characters, which psycopg doesn't like
            if "\x00" in value:
                request.POST[key] = value.replace("\x00", "\\x00")

    # We require protocol_version 1; it's the only supported version, and
    # passing a different version should raise an error.
    if request.POST.get("protocol_version", "1") != "1":
        raise _exc_with_message(HTTPBadRequest, "Unknown protocol version.")

    # Check if any fields were supplied as a tuple and have become a
    # FieldStorage. The 'content' and 'gpg_signature' fields _should_ be a
    # FieldStorage, however.
    # ref: https://github.com/pypa/warehouse/issues/2185
    # ref: https://github.com/pypa/warehouse/issues/2491
    for field in set(request.POST) - {"content", "gpg_signature"}:
        values = request.POST.getall(field)
        if any(isinstance(value, FieldStorage) for value in values):
            raise _exc_with_message(HTTPBadRequest,
                                    f"{field}: Should not be a tuple.")

    # Look up all of the valid classifiers
    all_classifiers = request.db.query(Classifier).all()

    # Validate and process the incoming metadata.
    form = MetadataForm(request.POST)

    # Add a validator for deprecated classifiers
    form.classifiers.validators.append(_no_deprecated_classifiers(request))

    form.classifiers.choices = [(c.classifier, c.classifier)
                                for c in all_classifiers]
    if not form.validate():
        for field_name in _error_message_order:
            if field_name in form.errors:
                break
        else:
            field_name = sorted(form.errors.keys())[0]

        if field_name in form:
            field = form[field_name]
            if field.description and isinstance(field, wtforms.StringField):
                error_message = (
                    "{value!r} is an invalid value for {field}. ".format(
                        value=field.data, field=field.description) +
                    "Error: {} ".format(form.errors[field_name][0]) + "See "
                    "https://packaging.python.org/specifications/core-metadata"
                )
            else:
                error_message = "Invalid value for {field}. Error: {msgs[0]}".format(
                    field=field_name, msgs=form.errors[field_name])
        else:
            error_message = "Error: {}".format(form.errors[field_name][0])

        raise _exc_with_message(HTTPBadRequest, error_message)

    # Ensure that we have file data in the request.
    if "content" not in request.POST:
        raise _exc_with_message(HTTPBadRequest,
                                "Upload payload does not have a file.")

    # Look up the project first before doing anything else, this is so we can
    # automatically register it if we need to and can check permissions before
    # going any further.
    try:
        project = (request.db.query(Project).filter(
            Project.normalized_name == func.normalize_pep426_name(
                form.name.data)).one())
    except NoResultFound:
        # Check for AdminFlag set by a PyPI Administrator disabling new project
        # registration, reasons for this include Spammers, security
        # vulnerabilities, or just wanting to be lazy and not worry ;)
        if request.flags.enabled("disallow-new-project-registration"):
            raise _exc_with_message(
                HTTPForbidden,
                ("New project registration temporarily disabled. "
                 "See {projecthelp} for details").format(
                     projecthelp=request.help_url(
                         _anchor="admin-intervention")),
            ) from None

        # Before we create the project, we're going to check our blacklist to
        # see if this project is even allowed to be registered. If it is not,
        # then we're going to deny the request to create this project.
        if request.db.query(exists().where(
                BlacklistedProject.name == func.normalize_pep426_name(
                    form.name.data))).scalar():
            raise _exc_with_message(
                HTTPBadRequest,
                ("The name {name!r} isn't allowed. "
                 "See {projecthelp} "
                 "for more information.").format(
                     name=form.name.data,
                     projecthelp=request.help_url(_anchor="project-name"),
                 ),
            ) from None

        # Also check for collisions with Python Standard Library modules.
        if packaging.utils.canonicalize_name(
                form.name.data) in STDLIB_PROHIBITTED:
            raise _exc_with_message(
                HTTPBadRequest,
                ("The name {name!r} isn't allowed (conflict with Python "
                 "Standard Library module name). See "
                 "{projecthelp} for more information.").format(
                     name=form.name.data,
                     projecthelp=request.help_url(_anchor="project-name"),
                 ),
            ) from None

        # The project doesn't exist in our database, so first we'll check for
        # projects with a similar name
        squattees = (request.db.query(Project).filter(
            func.levenshtein(Project.normalized_name,
                             func.normalize_pep426_name(form.name.data)) <= 2).
                     all())

        # Next we'll create the project
        project = Project(name=form.name.data)
        request.db.add(project)

        # Now that the project exists, add any squats which it is the squatter for
        for squattee in squattees:
            request.db.add(Squat(squatter=project, squattee=squattee))

        # Then we'll add a role setting the current user as the "Owner" of the
        # project.
        request.db.add(
            Role(user=request.user, project=project, role_name="Owner"))
        # TODO: This should be handled by some sort of database trigger or a
        #       SQLAlchemy hook or the like instead of doing it inline in this
        #       view.
        request.db.add(
            JournalEntry(
                name=project.name,
                action="create",
                submitted_by=request.user,
                submitted_from=request.remote_addr,
            ))
        request.db.add(
            JournalEntry(
                name=project.name,
                action="add Owner {}".format(request.user.username),
                submitted_by=request.user,
                submitted_from=request.remote_addr,
            ))

    # Check that the user has permission to do things to this project, if this
    # is a new project this will act as a sanity check for the role we just
    # added above.
    if not request.has_permission("upload", project):
        raise _exc_with_message(
            HTTPForbidden,
            ("The user '{0}' isn't allowed to upload to project '{1}'. "
             "See {2} for more information.").format(
                 request.user.username,
                 project.name,
                 request.help_url(_anchor="project-name"),
             ),
        )

    # Uploading should prevent broken rendered descriptions.
    # Temporarily disabled, see
    # https://github.com/pypa/warehouse/issues/4079
    # if form.description.data:
    #     description_content_type = form.description_content_type.data
    #     if not description_content_type:
    #         description_content_type = "text/x-rst"
    #     rendered = readme.render(
    #         form.description.data, description_content_type, use_fallback=False
    #     )

    #     if rendered is None:
    #         if form.description_content_type.data:
    #             message = (
    #                 "The description failed to render "
    #                 "for '{description_content_type}'."
    #             ).format(description_content_type=description_content_type)
    #         else:
    #             message = (
    #                 "The description failed to render "
    #                 "in the default format of reStructuredText."
    #             )
    #         raise _exc_with_message(
    #             HTTPBadRequest,
    #             "{message} See {projecthelp} for more information.".format(
    #                 message=message,
    #                 projecthelp=request.help_url(_anchor="description-content-type"),
    #             ),
    #         ) from None

    try:
        canonical_version = packaging.utils.canonicalize_version(
            form.version.data)
        release = (request.db.query(Release).filter(
            (Release.project == project)
            & (Release.canonical_version == canonical_version)).one())
    except MultipleResultsFound:
        # There are multiple releases of this project which have the same
        # canonical version that were uploaded before we checked for
        # canonical version equivalence, so return the exact match instead
        release = (request.db.query(
            Release).filter((Release.project == project)
                            & (Release.version == form.version.data)).one())
    except NoResultFound:
        release = Release(
            project=project,
            _classifiers=[
                c for c in all_classifiers
                if c.classifier in form.classifiers.data
            ],
            dependencies=list(
                _construct_dependencies(
                    form,
                    {
                        "requires": DependencyKind.requires,
                        "provides": DependencyKind.provides,
                        "obsoletes": DependencyKind.obsoletes,
                        "requires_dist": DependencyKind.requires_dist,
                        "provides_dist": DependencyKind.provides_dist,
                        "obsoletes_dist": DependencyKind.obsoletes_dist,
                        "requires_external": DependencyKind.requires_external,
                        "project_urls": DependencyKind.project_url,
                    },
                )),
            canonical_version=canonical_version,
            **{
                k: getattr(form, k).data
                for k in {
                    # This is a list of all the fields in the form that we
                    # should pull off and insert into our new release.
                    "version",
                    "summary",
                    "description",
                    "description_content_type",
                    "license",
                    "author",
                    "author_email",
                    "maintainer",
                    "maintainer_email",
                    "keywords",
                    "platform",
                    "home_page",
                    "download_url",
                    "requires_python",
                }
            },
            uploader=request.user,
            uploaded_via=request.user_agent,
        )
        request.db.add(release)
        # TODO: This should be handled by some sort of database trigger or
        #       a SQLAlchemy hook or the like instead of doing it inline in
        #       this view.
        request.db.add(
            JournalEntry(
                name=release.project.name,
                version=release.version,
                action="new release",
                submitted_by=request.user,
                submitted_from=request.remote_addr,
            ))

    # TODO: We need a better solution to this than to just do it inline inside
    #       this method. Ideally the version field would just be sortable, but
    #       at least this should be some sort of hook or trigger.
    releases = (request.db.query(Release).filter(
        Release.project == project).options(
            orm.load_only(Release._pypi_ordering)).all())
    for i, r in enumerate(
            sorted(releases,
                   key=lambda x: packaging.version.parse(x.version))):
        r._pypi_ordering = i

    # Pull the filename out of our POST data.
    filename = request.POST["content"].filename

    # Make sure that the filename does not contain any path separators.
    if "/" in filename or "\\" in filename:
        raise _exc_with_message(
            HTTPBadRequest,
            "Cannot upload a file with '/' or '\\' in the name.")

    # Make sure the filename ends with an allowed extension.
    if _dist_file_regexes[project.allow_legacy_files].search(filename) is None:
        raise _exc_with_message(
            HTTPBadRequest,
            "Invalid file extension: Use .egg, .tar.gz, .whl or .zip "
            "extension. (https://www.python.org/dev/peps/pep-0527)",
        )

    # Make sure that our filename matches the project that it is being uploaded
    # to.
    prefix = pkg_resources.safe_name(project.name).lower()
    if not pkg_resources.safe_name(filename).lower().startswith(prefix):
        raise _exc_with_message(
            HTTPBadRequest,
            "Start filename for {!r} with {!r}.".format(project.name, prefix),
        )

    # Check the content type of what is being uploaded
    if not request.POST["content"].type or request.POST[
            "content"].type.startswith("image/"):
        raise _exc_with_message(HTTPBadRequest, "Invalid distribution file.")

    # Ensure that the package filetype is allowed.
    # TODO: Once PEP 527 is completely implemented we should be able to delete
    #       this and just move it into the form itself.
    if not project.allow_legacy_files and form.filetype.data not in {
            "sdist",
            "bdist_wheel",
            "bdist_egg",
    }:
        raise _exc_with_message(HTTPBadRequest, "Unknown type of file.")

    # The project may or may not have a file size specified on the project, if
    # it does then it may or may not be smaller or larger than our global file
    # size limits.
    file_size_limit = max(filter(None, [MAX_FILESIZE, project.upload_limit]))

    with tempfile.TemporaryDirectory() as tmpdir:
        temporary_filename = os.path.join(tmpdir, filename)

        # Buffer the entire file onto disk, checking the hash of the file as we
        # go along.
        with open(temporary_filename, "wb") as fp:
            file_size = 0
            file_hashes = {
                "md5": hashlib.md5(),
                "sha256": hashlib.sha256(),
                "blake2_256": hashlib.blake2b(digest_size=256 // 8),
            }
            for chunk in iter(lambda: request.POST["content"].file.read(8096),
                              b""):
                file_size += len(chunk)
                if file_size > file_size_limit:
                    raise _exc_with_message(
                        HTTPBadRequest,
                        "File too large. " +
                        "Limit for project {name!r} is {limit} MB. ".format(
                            name=project.name,
                            limit=file_size_limit // (1024 * 1024)) + "See " +
                        request.help_url(_anchor="file-size-limit"),
                    )
                fp.write(chunk)
                for hasher in file_hashes.values():
                    hasher.update(chunk)

        # Take our hash functions and compute the final hashes for them now.
        file_hashes = {
            k: h.hexdigest().lower()
            for k, h in file_hashes.items()
        }

        # Actually verify the digests that we've gotten. We're going to use
        # hmac.compare_digest even though we probably don't actually need to
        # because it's better safe than sorry. In the case of multiple digests
        # we expect them all to be given.
        if not all([
                hmac.compare_digest(
                    getattr(form,
                            "{}_digest".format(digest_name)).data.lower(),
                    digest_value,
                ) for digest_name, digest_value in file_hashes.items()
                if getattr(form, "{}_digest".format(digest_name)).data
        ]):
            raise _exc_with_message(
                HTTPBadRequest,
                "The digest supplied does not match a digest calculated "
                "from the uploaded file.",
            )

        # Check to see if the file that was uploaded exists already or not.
        is_duplicate = _is_duplicate_file(request.db, filename, file_hashes)
        if is_duplicate:
            return Response()
        elif is_duplicate is not None:
            raise _exc_with_message(
                HTTPBadRequest,
                # Note: Changing this error message to something that doesn't
                # start with "File already exists" will break the
                # --skip-existing functionality in twine
                # ref: https://github.com/pypa/warehouse/issues/3482
                # ref: https://github.com/pypa/twine/issues/332
                "File already exists. See " +
                request.help_url(_anchor="file-name-reuse"),
            )

        # Check to see if the file that was uploaded exists in our filename log
        if request.db.query(
                request.db.query(Filename).filter(
                    Filename.filename == filename).exists()).scalar():
            raise _exc_with_message(
                HTTPBadRequest,
                "This filename has already been used, use a "
                "different version. "
                "See " + request.help_url(_anchor="file-name-reuse"),
            )

        # Check to see if uploading this file would create a duplicate sdist
        # for the current release.
        if (form.filetype.data == "sdist" and request.db.query(
                request.db.query(File).filter((File.release == release) & (
                    File.packagetype == "sdist")).exists()).scalar()):
            raise _exc_with_message(
                HTTPBadRequest, "Only one sdist may be uploaded per release.")

        # Check the file to make sure it is a valid distribution file.
        if not _is_valid_dist_file(temporary_filename, form.filetype.data):
            raise _exc_with_message(HTTPBadRequest,
                                    "Invalid distribution file.")

        # Check that if it's a binary wheel, it's on a supported platform
        if filename.endswith(".whl"):
            wheel_info = _wheel_file_re.match(filename)
            plats = wheel_info.group("plat").split(".")
            for plat in plats:
                if not _valid_platform_tag(plat):
                    raise _exc_with_message(
                        HTTPBadRequest,
                        "Binary wheel '{filename}' has an unsupported "
                        "platform tag '{plat}'.".format(filename=filename,
                                                        plat=plat),
                    )

        # Also buffer the entire signature file to disk.
        if "gpg_signature" in request.POST:
            has_signature = True
            with open(os.path.join(tmpdir, filename + ".asc"), "wb") as fp:
                signature_size = 0
                for chunk in iter(
                        lambda: request.POST["gpg_signature"].file.read(8096),
                        b""):
                    signature_size += len(chunk)
                    if signature_size > MAX_SIGSIZE:
                        raise _exc_with_message(HTTPBadRequest,
                                                "Signature too large.")
                    fp.write(chunk)

            # Check whether signature is ASCII armored
            with open(os.path.join(tmpdir, filename + ".asc"), "rb") as fp:
                if not fp.read().startswith(b"-----BEGIN PGP SIGNATURE-----"):
                    raise _exc_with_message(
                        HTTPBadRequest, "PGP signature isn't ASCII armored.")
        else:
            has_signature = False

        # TODO: This should be handled by some sort of database trigger or a
        #       SQLAlchemy hook or the like instead of doing it inline in this
        #       view.
        request.db.add(Filename(filename=filename))

        # Store the information about the file in the database.
        file_ = File(
            release=release,
            filename=filename,
            python_version=form.pyversion.data,
            packagetype=form.filetype.data,
            comment_text=form.comment.data,
            size=file_size,
            has_signature=bool(has_signature),
            md5_digest=file_hashes["md5"],
            sha256_digest=file_hashes["sha256"],
            blake2_256_digest=file_hashes["blake2_256"],
            # Figure out what our filepath is going to be, we're going to use a
            # directory structure based on the hash of the file contents. This
            # will ensure that the contents of the file cannot change without
            # it also changing the path that the file is saved to.
            path="/".join([
                file_hashes[PATH_HASHER][:2],
                file_hashes[PATH_HASHER][2:4],
                file_hashes[PATH_HASHER][4:],
                filename,
            ]),
            uploaded_via=request.user_agent,
        )
        request.db.add(file_)

        # TODO: This should be handled by some sort of database trigger or a
        #       SQLAlchemy hook or the like instead of doing it inline in this
        #       view.
        request.db.add(
            JournalEntry(
                name=release.project.name,
                version=release.version,
                action="add {python_version} file {filename}".format(
                    python_version=file_.python_version,
                    filename=file_.filename),
                submitted_by=request.user,
                submitted_from=request.remote_addr,
            ))

        # TODO: We need a better answer about how to make this transactional so
        #       this won't take affect until after a commit has happened, for
        #       now we'll just ignore it and save it before the transaction is
        #       committed.
        storage = request.find_service(IFileStorage)
        storage.store(
            file_.path,
            os.path.join(tmpdir, filename),
            meta={
                "project": file_.release.project.normalized_name,
                "version": file_.release.version,
                "package-type": file_.packagetype,
                "python-version": file_.python_version,
            },
        )
        if has_signature:
            storage.store(
                file_.pgp_path,
                os.path.join(tmpdir, filename + ".asc"),
                meta={
                    "project": file_.release.project.normalized_name,
                    "version": file_.release.version,
                    "package-type": file_.packagetype,
                    "python-version": file_.python_version,
                },
            )

    return Response()
Code example #49
    def oai_query(self,
                  offset=0,
                  batch_size=20,
                  needed_sets=None,
                  disallowed_sets=None,
                  allowed_sets=None,
                  from_date=None,
                  until_date=None,
                  identifier=None):

        needed_sets = needed_sets or []
        disallowed_sets = disallowed_sets or []
        allowed_sets = allowed_sets or []
        if batch_size < 0:
            batch_size = 0

        # make sure until_date is set and not in the future
        if until_date is None or until_date > datetime.datetime.utcnow():
            until_date = datetime.datetime.utcnow()

        query = self._records.select(
            order_by=[sql.desc(self._records.c.modified)])

        # filter dates
        query.append_whereclause(self._records.c.modified <= until_date)

        if identifier is not None:
            query.append_whereclause(self._records.c.record_id == identifier)

        if from_date is not None:
            query.append_whereclause(self._records.c.modified >= from_date)

        # filter sets

        setclauses = []
        for set_id in needed_sets:
            alias = self._setrefs.alias()
            setclauses.append(
                sql.and_(alias.c.set_id == set_id,
                         alias.c.record_id == self._records.c.record_id))

        if setclauses:
            query.append_whereclause(sql.and_(*setclauses))

        allowed_setclauses = []
        for set_id in allowed_sets:
            alias = self._setrefs.alias()
            allowed_setclauses.append(
                sql.and_(alias.c.set_id == set_id,
                         alias.c.record_id == self._records.c.record_id))

        if allowed_setclauses:
            query.append_whereclause(sql.or_(*allowed_setclauses))

        disallowed_setclauses = []
        for set_id in disallowed_sets:
            alias = self._setrefs.alias()
            disallowed_setclauses.append(
                sql.exists(
                    [self._records.c.record_id],
                    sql.and_(alias.c.set_id == set_id,
                             alias.c.record_id == self._records.c.record_id)))

        if disallowed_setclauses:
            query.append_whereclause(sql.not_(sql.or_(*disallowed_setclauses)))

        for row in query.distinct().offset(offset).limit(batch_size).execute():
            yield {
                'id': row.record_id,
                'deleted': row.deleted,
                'modified': row.modified,
                'metadata': json.loads(row.metadata),
                'sets': self.get_setrefs(row.record_id)
            }
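
A hedged usage sketch (store stands in for whatever object this method is bound to); each yielded record is a plain dict:

import datetime

for record in store.oai_query(batch_size=10,
                              from_date=datetime.datetime(2020, 1, 1)):
    print(record['id'], record['modified'], record['sets'])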
Code example #50
    def deleteByKeyAndCommit(self, key, model):
        if self.session.query(exists().where(model.key == key)).scalar():
            obj = self.session.query(model).filter(model.key == key).first()
            self.session.delete(obj)
        self.session.commit()
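
A hedged alternative sketch: for a unique key, the exists-then-fetch round trip above can be collapsed into a single bulk delete with the same observable effect (assuming no concurrent writers):

    def delete_by_key_and_commit(self, key, model):
        # Query.delete() emits one DELETE and returns the number of rows matched.
        self.session.query(model).filter(model.key == key).delete(
            synchronize_session=False)
        self.session.commit()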
Code example #51
def insert(object_: Dict[str, Any], session: scoped_session,
           id_: Optional[str] = None) -> str:
    """Insert an object to database [POST] and returns the inserted object.
    :param object_: object to be inserted
    :param session: sqlalchemy scoped session
    :param id_: id of the object to be inserted (optional param)
    :return: ID of object inserted
    """
    rdf_class = None
    instance = None
    # Check for the class at the beginning
    try:
        rdf_class = session.query(RDFClass).filter(
            RDFClass.name == object_["@type"]).one()
    except NoResultFound:
        raise ClassNotFound(type_=object_["@type"])
    if id_ is not None:
        if session.query(exists().where(Instance.id == id_)).scalar():
            raise InstanceExists(type_=rdf_class.name, id_=id_)
        else:
            instance = Instance(id=id_, type_=rdf_class.id)
    else:
        instance = Instance(type_=rdf_class.id)
    session.add(instance)
    session.flush()

    for prop_name in object_:

        if prop_name not in ["@type", "@context"]:
            try:
                property_ = session.query(properties).filter(
                    properties.name == prop_name).one()
            except NoResultFound:
                # Adds new Property
                session.close()
                raise PropertyNotFound(type_=prop_name)

            # For insertion in III
            if isinstance(object_[prop_name], dict):
                instance_id = insert(object_[prop_name], session=session)
                instance_object = session.query(Instance).filter(
                    Instance.id == instance_id).one()
                if property_.type_ == "PROPERTY" or property_.type_ == "INSTANCE":
                    property_.type_ = "INSTANCE"
                    session.add(property_)
                    triple = GraphIII(
                        subject=instance.id,
                        predicate=property_.id,
                        object_=instance_object.id)
                    session.add(triple)
                else:
                    session.close()
                    raise NotInstanceProperty(type_=prop_name)

            # For insertion in IAC
            elif session.query(exists().where(RDFClass.name == str(object_[prop_name]))).scalar():
                if property_.type_ == "PROPERTY" or property_.type_ == "ABSTRACT":
                    property_.type_ = "ABSTRACT"
                    session.add(property_)
                    class_ = session.query(RDFClass).filter(
                        RDFClass.name == object_[prop_name]).one()
                    triple = GraphIAC(
                        subject=instance.id,
                        predicate=property_.id,
                        object_=class_.id)
                    session.add(triple)
                else:
                    session.close()
                    raise NotAbstractProperty(type_=prop_name)

            # For insertion in IIT
            else:
                terminal = Terminal(value=object_[prop_name])
                session.add(terminal)
                session.flush()  # Assigns ID without committing

                if property_.type_ == "PROPERTY" or property_.type_ == "INSTANCE":
                    property_.type_ = "INSTANCE"
                    session.add(property_)
                    triple = GraphIIT(
                        subject=instance.id,
                        predicate=property_.id,
                        object_=terminal.id)
                    # Add things directly to session, if anything fails whole
                    # transaction is aborted
                    session.add(triple)
                else:
                    session.close()
                    raise NotInstanceProperty(type_=prop_name)

    session.commit()
    return instance.id
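
A hedged usage sketch, assuming the vocabulary already defines a 'Person' RDFClass and a 'name' property:

new_id = insert({"@type": "Person", "name": "Ada Lovelace"}, session=session)
print(new_id)  # ID of the freshly created Instance row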
Code example #52
File: sql_interface.py Project: sagiben/openprices
    def exists(self, obj_field, value):
        # The query yields a single (bool,) row; unpack it directly.
        (ret, ), = self.session.query(exists().where(obj_field == value))
        return ret
Code example #53
def does_workflow_exist(playbook_id, workflow_id):
    return executiondb.execution_db.session.query(exists().where(
        and_(Workflow.id == workflow_id,
             Workflow.playbook_id == playbook_id))).scalar()
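
A hedged usage sketch (the two IDs are hypothetical):

if not does_workflow_exist(playbook_id, workflow_id):
    raise ValueError("Unknown workflow {} in playbook {}".format(
        workflow_id, playbook_id))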
Code example #54
class CoreFixtures(object):
    # lambdas which return a tuple of ColumnElement objects.
    # must return at least two objects that should compare differently.
    # to test more varieties of "difference" additional objects can be added.
    fixtures = [
        lambda: (
            column("q"),
            column("x"),
            column("q", Integer),
            column("q", String),
        ),
        lambda: (~column("q", Boolean), ~column("p", Boolean)),
        lambda: (
            table_a.c.a.label("foo"),
            table_a.c.a.label("bar"),
            table_a.c.b.label("foo"),
        ),
        lambda: (
            _label_reference(table_a.c.a.desc()),
            _label_reference(table_a.c.a.asc()),
        ),
        lambda: (_textual_label_reference("a"), _textual_label_reference("b")),
        lambda: (
            text("select a, b from table").columns(a=Integer, b=String),
            text("select a, b, c from table").columns(
                a=Integer, b=String, c=Integer
            ),
            text("select a, b, c from table where foo=:bar").bindparams(
                bindparam("bar", type_=Integer)
            ),
            text("select a, b, c from table where foo=:foo").bindparams(
                bindparam("foo", type_=Integer)
            ),
            text("select a, b, c from table where foo=:bar").bindparams(
                bindparam("bar", type_=String)
            ),
        ),
        lambda: (
            column("q") == column("x"),
            column("q") == column("y"),
            column("z") == column("x"),
            column("z") + column("x"),
            column("z") - column("x"),
            column("x") - column("z"),
            column("z") > column("x"),
            column("x").in_([5, 7]),
            column("x").in_([10, 7, 8]),
            # note these two are mathematically equivalent but for now they
            # are considered to be different
            column("z") >= column("x"),
            column("x") <= column("z"),
            column("q").between(5, 6),
            column("q").between(5, 6, symmetric=True),
            column("q").like("somstr"),
            column("q").like("somstr", escape="\\"),
            column("q").like("somstr", escape="X"),
        ),
        lambda: (
            table_a.c.a,
            table_a.c.a._annotate({"orm": True}),
            table_a.c.a._annotate({"orm": True})._annotate({"bar": False}),
            table_a.c.a._annotate(
                {"orm": True, "parententity": MyEntity("a", table_a)}
            ),
            table_a.c.a._annotate(
                {"orm": True, "parententity": MyEntity("b", table_a)}
            ),
            table_a.c.a._annotate(
                {"orm": True, "parententity": MyEntity("b", select([table_a]))}
            ),
        ),
        lambda: (
            cast(column("q"), Integer),
            cast(column("q"), Float),
            cast(column("p"), Integer),
        ),
        lambda: (
            bindparam("x"),
            bindparam("y"),
            bindparam("x", type_=Integer),
            bindparam("x", type_=String),
            bindparam(None),
        ),
        lambda: (_OffsetLimitParam("x"), _OffsetLimitParam("y")),
        lambda: (func.foo(), func.foo(5), func.bar()),
        lambda: (func.current_date(), func.current_time()),
        lambda: (
            func.next_value(Sequence("q")),
            func.next_value(Sequence("p")),
        ),
        lambda: (True_(), False_()),
        lambda: (Null(),),
        lambda: (ReturnTypeFromArgs("foo"), ReturnTypeFromArgs(5)),
        lambda: (FunctionElement(5), FunctionElement(5, 6)),
        lambda: (func.count(), func.not_count()),
        lambda: (func.char_length("abc"), func.char_length("def")),
        lambda: (GenericFunction("a", "b"), GenericFunction("a")),
        lambda: (CollationClause("foobar"), CollationClause("batbar")),
        lambda: (
            type_coerce(column("q", Integer), String),
            type_coerce(column("q", Integer), Float),
            type_coerce(column("z", Integer), Float),
        ),
        lambda: (table_a.c.a, table_b.c.a),
        lambda: (tuple_(1, 2), tuple_(3, 4)),
        lambda: (func.array_agg([1, 2]), func.array_agg([3, 4])),
        lambda: (
            func.percentile_cont(0.5).within_group(table_a.c.a),
            func.percentile_cont(0.5).within_group(table_a.c.b),
            func.percentile_cont(0.5).within_group(table_a.c.a, table_a.c.b),
            func.percentile_cont(0.5).within_group(
                table_a.c.a, table_a.c.b, column("q")
            ),
        ),
        lambda: (
            func.is_equal("a", "b").as_comparison(1, 2),
            func.is_equal("a", "c").as_comparison(1, 2),
            func.is_equal("a", "b").as_comparison(2, 1),
            func.is_equal("a", "b", "c").as_comparison(1, 2),
            func.foobar("a", "b").as_comparison(1, 2),
        ),
        lambda: (
            func.row_number().over(order_by=table_a.c.a),
            func.row_number().over(order_by=table_a.c.a, range_=(0, 10)),
            func.row_number().over(order_by=table_a.c.a, range_=(None, 10)),
            func.row_number().over(order_by=table_a.c.a, rows=(None, 20)),
            func.row_number().over(order_by=table_a.c.b),
            func.row_number().over(
                order_by=table_a.c.a, partition_by=table_a.c.b
            ),
        ),
        lambda: (
            func.count(1).filter(table_a.c.a == 5),
            func.count(1).filter(table_a.c.a == 10),
            func.foob(1).filter(table_a.c.a == 10),
        ),
        lambda: (
            and_(table_a.c.a == 5, table_a.c.b == table_b.c.a),
            and_(table_a.c.a == 5, table_a.c.a == table_b.c.a),
            or_(table_a.c.a == 5, table_a.c.b == table_b.c.a),
            ClauseList(table_a.c.a == 5, table_a.c.b == table_b.c.a),
            ClauseList(table_a.c.a == 5, table_a.c.b == table_a.c.a),
        ),
        lambda: (
            case(whens=[(table_a.c.a == 5, 10), (table_a.c.a == 10, 20)]),
            case(whens=[(table_a.c.a == 18, 10), (table_a.c.a == 10, 20)]),
            case(whens=[(table_a.c.a == 5, 10), (table_a.c.b == 10, 20)]),
            case(
                whens=[
                    (table_a.c.a == 5, 10),
                    (table_a.c.b == 10, 20),
                    (table_a.c.a == 9, 12),
                ]
            ),
            case(
                whens=[(table_a.c.a == 5, 10), (table_a.c.a == 10, 20)],
                else_=30,
            ),
            case({"wendy": "W", "jack": "J"}, value=table_a.c.a, else_="E"),
            case({"wendy": "W", "jack": "J"}, value=table_a.c.b, else_="E"),
            case({"wendy_w": "W", "jack": "J"}, value=table_a.c.a, else_="E"),
        ),
        lambda: (
            extract("foo", table_a.c.a),
            extract("foo", table_a.c.b),
            extract("bar", table_a.c.a),
        ),
        lambda: (
            Slice(1, 2, 5),
            Slice(1, 5, 5),
            Slice(1, 5, 10),
            Slice(2, 10, 15),
        ),
        lambda: (
            select([table_a.c.a]),
            select([table_a.c.a, table_a.c.b]),
            select([table_a.c.b, table_a.c.a]),
            select([table_a.c.a]).where(table_a.c.b == 5),
            select([table_a.c.a])
            .where(table_a.c.b == 5)
            .where(table_a.c.a == 10),
            select([table_a.c.a]).where(table_a.c.b == 5).with_for_update(),
            select([table_a.c.a])
            .where(table_a.c.b == 5)
            .with_for_update(nowait=True),
            select([table_a.c.a]).where(table_a.c.b == 5).correlate(table_b),
            select([table_a.c.a])
            .where(table_a.c.b == 5)
            .correlate_except(table_b),
        ),
        lambda: (
            select([table_a.c.a]).cte(),
            select([table_a.c.a]).cte(recursive=True),
            select([table_a.c.a]).cte(name="some_cte", recursive=True),
            select([table_a.c.a]).cte(name="some_cte"),
            select([table_a.c.a]).cte(name="some_cte").alias("other_cte"),
            select([table_a.c.a])
            .cte(name="some_cte")
            .union_all(select([table_a.c.a])),
            select([table_a.c.a])
            .cte(name="some_cte")
            .union_all(select([table_a.c.b])),
            select([table_a.c.a]).lateral(),
            select([table_a.c.a]).lateral(name="bar"),
            table_a.tablesample(func.bernoulli(1)),
            table_a.tablesample(func.bernoulli(1), seed=func.random()),
            table_a.tablesample(func.bernoulli(1), seed=func.other_random()),
            table_a.tablesample(func.hoho(1)),
            table_a.tablesample(func.bernoulli(1), name="bar"),
            table_a.tablesample(
                func.bernoulli(1), name="bar", seed=func.random()
            ),
        ),
        lambda: (
            select([table_a.c.a]),
            select([table_a.c.a]).prefix_with("foo"),
            select([table_a.c.a]).prefix_with("foo", dialect="mysql"),
            select([table_a.c.a]).prefix_with("foo", dialect="postgresql"),
            select([table_a.c.a]).prefix_with("bar"),
            select([table_a.c.a]).suffix_with("bar"),
        ),
        lambda: (
            select([table_a_2.c.a]),
            select([table_a_2_fs.c.a]),
            select([table_a_2_bs.c.a]),
        ),
        lambda: (
            select([table_a.c.a]),
            select([table_a.c.a]).with_hint(None, "some hint"),
            select([table_a.c.a]).with_hint(None, "some other hint"),
            select([table_a.c.a]).with_hint(table_a, "some hint"),
            select([table_a.c.a])
            .with_hint(table_a, "some hint")
            .with_hint(None, "some other hint"),
            select([table_a.c.a]).with_hint(table_a, "some other hint"),
            select([table_a.c.a]).with_hint(
                table_a, "some hint", dialect_name="mysql"
            ),
            select([table_a.c.a]).with_hint(
                table_a, "some hint", dialect_name="postgresql"
            ),
        ),
        lambda: (
            table_a.join(table_b, table_a.c.a == table_b.c.a),
            table_a.join(
                table_b, and_(table_a.c.a == table_b.c.a, table_a.c.b == 1)
            ),
            table_a.outerjoin(table_b, table_a.c.a == table_b.c.a),
        ),
        lambda: (
            table_a.alias("a"),
            table_a.alias("b"),
            table_a.alias(),
            table_b.alias("a"),
            select([table_a.c.a]).alias("a"),
        ),
        lambda: (
            FromGrouping(table_a.alias("a")),
            FromGrouping(table_a.alias("b")),
        ),
        lambda: (
            SelectStatementGrouping(select([table_a])),
            SelectStatementGrouping(select([table_b])),
        ),
        lambda: (
            select([table_a.c.a]).scalar_subquery(),
            select([table_a.c.a]).where(table_a.c.b == 5).scalar_subquery(),
        ),
        lambda: (
            exists().where(table_a.c.a == 5),
            exists().where(table_a.c.b == 5),
        ),
        lambda: (
            union(select([table_a.c.a]), select([table_a.c.b])),
            union(select([table_a.c.a]), select([table_a.c.b])).order_by("a"),
            union_all(select([table_a.c.a]), select([table_a.c.b])),
            union(select([table_a.c.a])),
            union(
                select([table_a.c.a]),
                select([table_a.c.b]).where(table_a.c.b > 5),
            ),
        ),
        lambda: (
            table("a", column("x"), column("y")),
            table("a", column("y"), column("x")),
            table("b", column("x"), column("y")),
            table("a", column("x"), column("y"), column("z")),
            table("a", column("x"), column("y", Integer)),
            table("a", column("q"), column("y", Integer)),
        ),
        lambda: (table_a, table_b),
    ]

    dont_compare_values_fixtures = [
        lambda: (
            # same number of params each time, so comparison for IN with the
            # legacy behavior of one bound parameter per value still works
            column("x").in_(random_choices(range(10), k=3)),
            # expanding IN places the whole list into a single parameter
            # so it can be of arbitrary length as well
            column("x").in_(
                bindparam(
                    "q",
                    random_choices(range(10), k=random.randint(0, 7)),
                    expanding=True,
                )
            ),
            column("x") == random.randint(1, 10),
        )
    ]
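    # A minimal illustration (assuming SQLAlchemy 1.3-era semantics) of why
    # the two IN styles above render differently:
    #
    #     from sqlalchemy import bindparam, column
    #
    #     print(column("x").in_([1, 2, 3]))
    #     # -> x IN (:x_1, :x_2, :x_3)    (one bound parameter per value)
    #
    #     print(column("x").in_(bindparam("q", [1, 2, 3], expanding=True)))
    #     # -> x IN ([EXPANDING_q])       (a single expanding parameter)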

    def _complex_fixtures():
        def one():
            a1 = table_a.alias()
            a2 = table_b_like_a.alias()

            stmt = (
                select([table_a.c.a, a1.c.b, a2.c.b])
                .where(table_a.c.b == a1.c.b)
                .where(a1.c.b == a2.c.b)
                .where(a1.c.a == 5)
            )

            return stmt

        def one_diff():
            a1 = table_b_like_a.alias()
            a2 = table_a.alias()

            stmt = (
                select([table_a.c.a, a1.c.b, a2.c.b])
                .where(table_a.c.b == a1.c.b)
                .where(a1.c.b == a2.c.b)
                .where(a1.c.a == 5)
            )

            return stmt

        def two():
            inner = one().subquery()

            stmt = select([table_b.c.a, inner.c.a, inner.c.b]).select_from(
                table_b.join(inner, table_b.c.b == inner.c.b)
            )

            return stmt

        def three():

            a1 = table_a.alias()
            a2 = table_a.alias()
            ex = exists().where(table_b.c.b == a1.c.a)

            stmt = (
                select([a1.c.a, a2.c.a])
                .select_from(a1.join(a2, a1.c.b == a2.c.b))
                .where(ex)
            )
            return stmt

        return [one(), one_diff(), two(), three()]

    fixtures.append(_complex_fixtures)
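
These fixture tuples appear to drive a structural-comparison test: expressions
within one tuple must compare as different, while each must compare as equal
to a freshly built copy of itself. A minimal sketch of that property, using
the public ClauseElement.compare() hook from the SQLAlchemy 1.x line these
examples target (the actual test harness is not shown in this excerpt):

    from sqlalchemy import column

    expr_a = column("x") == column("y")
    expr_b = column("x") == column("y")
    expr_c = column("x") == column("z")

    assert expr_a.compare(expr_b)      # same structure -> equal
    assert not expr_a.compare(expr_c)  # different column -> not equal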
Code example #55
0
File: bibfiles_db.py Project: zogamnuam/glottolog
def assign_ids(conn, verbose=False):
    merged_entry, entrygrp = Database._merged_entry, Database._entrygrp
    other = sa.orm.aliased(Entry)

    assert Entry.allhash(conn)

    reset_entries = sa.update(Entry, bind=conn).values(id=sa.null(), srefid=Entry.refid)
    print('%d entries' % reset_entries.execute().rowcount)

    # resolve splits: srefid = refid only for entries from the most similar hash group
    nsplit = 0
    select_split = sa.select([Entry.refid, Entry.hash, File.name, Entry.bibkey], bind=conn)\
        .select_from(sa.join(Entry, File))\
        .order_by(Entry.refid, Entry.hash, File.name, Entry.bibkey)\
        .where(sa.exists()
            .where(other.refid == Entry.refid)
            .where(other.hash != Entry.hash))
    update_split = sa.update(Entry, bind=conn)\
        .where(Entry.refid == sa.bindparam('eq_refid'))\
        .where(Entry.hash != sa.bindparam('ne_hash'))\
        .values(srefid=sa.null()).execute
    for refid, group in group_first(select_split.execute()):
        old = merged_entry(entrygrp(conn, refid), raw=True)
        nsplit += len(group)
        cand = [
            (hs, merged_entry(entrygrp(conn, hs), raw=True))
            for hs in unique(hs for _, hs, _, _ in group)]
        new = min(cand, key=lambda p: distance(old, p[1]))[0]
        separated = update_split(eq_refid=refid, ne_hash=new).rowcount
        if verbose:
            for row in group:
                print(row)
            for _, _, fn, bk in group:
                print('\t%r, %r, %r, %r' % Value.hashfields(conn, fn, bk))
            print('-> %s' % new)
            print('%d: %d separated from %s\n' % (refid, separated, new))
    print('%d split' % nsplit)

    nosplits = sa.select([~sa.exists().select_from(Entry).where(sa.exists()
        .where(other.srefid == Entry.srefid)
        .where(other.hash != Entry.hash))], bind=conn)
    assert nosplits.scalar()

    # resolve merges: id = srefid of the most similar srefid group
    nmerge = 0
    select_merge = sa.select([Entry.hash, Entry.srefid, File.name, Entry.bibkey], bind=conn)\
        .select_from(sa.join(Entry, File))\
        .order_by(Entry.hash, Entry.srefid.desc(), File.name, Entry.bibkey)\
        .where(sa.exists()
            .where(other.hash == Entry.hash)
            .where(other.srefid != Entry.srefid))
    update_merge = sa.update(Entry, bind=conn)\
        .where(Entry.hash == sa.bindparam('eq_hash'))\
        .where(Entry.srefid != sa.bindparam('ne_srefid'))\
        .values(id=sa.bindparam('new_id')).execute
    for hash, group in group_first(select_merge.execute()):
        new = merged_entry(entrygrp(conn, hash), raw=True)
        nmerge += len(group)
        cand = [
            (ri, merged_entry(entrygrp(conn, ri), raw=True))
            for ri in unique(ri for _, ri, _, _ in group)]
        old = min(cand, key=lambda p: distance(new, p[1]))[0]
        merged = update_merge(eq_hash=hash, ne_srefid=old, new_id=old).rowcount
        if verbose:
            for row in group:
                print(row)
            for _, _, fn, bk in group:
                print('\t%r, %r, %r, %r' % Value.hashfields(conn, fn, bk))
            print('-> %s' % old)
            print('%s: %d merged into %d\n' % (hash, merged, old))
    print('%d merged' % nmerge)

    # unchanged entries
    update_unchanged = sa.update(Entry, bind=conn)\
        .where(Entry.id == sa.null())\
        .where(Entry.srefid != sa.null())\
        .values(id=Entry.srefid)
    print('%d unchanged' % update_unchanged.execute().rowcount)

    nomerges = sa.select([~sa.exists().select_from(Entry).where(sa.exists()
        .where(other.hash == Entry.hash)
        .where(other.id != Entry.id))], bind=conn)
    assert nomerges.scalar()

    # identified
    update_identified = sa.update(Entry, bind=conn)\
        .where(Entry.refid == sa.null())\
        .where(sa.exists()
            .where(other.hash == Entry.hash)
            .where(other.id != sa.null()))\
        .values(id=sa.select([other.id]).where(other.hash == Entry.hash).where(other.id != sa.null()))
    print('%d identified (new/separated)' % update_identified.execute().rowcount)

    # assign new ids to hash groups of separated/new entries
    nextid = sa.select([sa.func.coalesce(sa.func.max(Entry.refid), 0) + 1], bind=conn).scalar()
    select_new = sa.select([Entry.hash], bind=conn)\
        .where(Entry.id == sa.null())\
        .group_by(Entry.hash)\
        .order_by(Entry.hash)
    assert conn.dialect.paramstyle == 'qmark'
    update_new = sa.update(Entry, bind=conn)\
        .values(id=sa.bindparam('new_id'))\
        .where(Entry.hash == sa.bindparam('eq_hash')).compile().string
    params = ((id, hash) for id, (hash,) in enumerate(select_new.execute(), nextid))
    dbapi_rowcount = conn.connection.executemany(update_new, params).rowcount
    # https://docs.python.org/2/library/sqlite3.html#sqlite3.Cursor.rowcount
    print('%d new ids (new/separated)' % (0 if dbapi_rowcount == -1 else dbapi_rowcount))

    assert Entry.allid(conn)
    assert Entry.onetoone(conn)

    # supersede relation
    select_superseded = sa.select([sa.func.count()], bind=conn).where(Entry.id != Entry.srefid)
    print('%d supersede pairs' % select_superseded.scalar())
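
The loops above depend on a group_first helper that is not shown in this
excerpt. Given that every SELECT feeding it is ordered by its first column, a
plausible reconstruction (hypothetical, for illustration only) is:

    import itertools
    import operator

    def group_first(rows):
        # Group already-sorted rows by their first column, yielding
        # (key, rows_in_group) pairs, matching how assign_ids consumes it.
        for key, grp in itertools.groupby(rows, key=operator.itemgetter(0)):
            yield key, list(grp)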
Code example #56
0
    def association_subquery(self, obj):
        """
        Returns an EXISTS clause that checks whether an association exists
        for the given SQLAlchemy declarative object. This query is used by
        the many_to_many_criteria method.

        Example query:

        .. code-block:: sql

            EXISTS (
                SELECT 1
                FROM article_tag_version
                WHERE article_id = 3
                AND tag_id = tags_version.id
                AND operation_type != 2
                AND EXISTS (
                    SELECT 1
                    FROM article_tag_version as article_tag_version2
                    WHERE article_tag_version2.tag_id = article_tag_version.tag_id
                    AND article_tag_version2.tx_id <= 5
                    GROUP BY article_tag_version2.tag_id
                    HAVING
                        MAX(article_tag_version2.tx_id) =
                        article_tag_version.tx_id
                )
            )

        :param obj: SQLAlchemy declarative object
        """

        tx_column = option(obj, 'transaction_column_name')
        reflector = VersionExpressionReflector(obj, self.property)

        association_table_alias = self.association_version_table.alias()
        association_cols = [
            association_table_alias.c[association_col.name]
            for _, association_col in self.remote_to_association_column_pairs
        ]

        association_exists = sa.exists(
            sa.select([1]).where(
                sa.and_(
                    association_table_alias.c[tx_column] <= getattr(
                        obj, tx_column),
                    *[
                        association_col == self.association_version_table.c[
                            association_col.name]
                        for association_col in association_cols
                    ])).group_by(*association_cols).having(
                        sa.func.max(association_table_alias.c[tx_column]) ==
                        self.association_version_table.c[tx_column]).correlate(
                            self.association_version_table))
        return sa.exists(
            sa.select([1]).where(
                sa.and_(
                    reflector(self.property.primaryjoin),
                    association_exists,
                    self.association_version_table.c.operation_type !=
                    Operation.DELETE,
                    adapt_columns(self.property.secondaryjoin),
                )).correlate(self.local_cls, self.remote_cls))
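
The inner EXISTS built above is an instance of the classic "latest revision
per key" pattern: a correlated subquery groups the versioned association rows
by key and keeps only rows carrying the maximum transaction id. Stripped of
the reflection machinery, the pattern looks roughly like this (names follow
the example query in the docstring; the transaction boundary 5 is
illustrative):

    import sqlalchemy as sa

    assoc = sa.table(
        "article_tag_version",
        sa.column("tag_id"), sa.column("tx_id"),
    )
    assoc2 = assoc.alias()

    # keep rows of `assoc` whose tx_id is the maximum within their
    # tag_id group, considering only transactions up to the boundary
    latest = sa.exists(
        sa.select([1])
        .where(assoc2.c.tag_id == assoc.c.tag_id)
        .where(assoc2.c.tx_id <= 5)
        .group_by(assoc2.c.tag_id)
        .having(sa.func.max(assoc2.c.tx_id) == assoc.c.tx_id)
        .correlate(assoc)
    )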
Code example #57
0
File: sql_interface.py Project: sagiben/openprices
    def exists_where_list(self, obj_fields, values):
        return self.session.query(exists().where(
            and_(*[field == value
                   for field, value in zip(obj_fields, values)]))).scalar()
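
A hypothetical call (the model and values here are invented for
illustration):

    # True if a row exists where Product.name == "milk" and
    # Product.store_id == 3, checked with a single EXISTS query.
    found = interface.exists_where_list(
        [Product.name, Product.store_id], ["milk", 3])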
Code example #58
0
def event_channel_exists(session, event_channel_id):
    return session.query(
        exists().where(EventChannel.id == event_channel_id)).scalar()
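
Both of the last two snippets rely on the same idiom: wrap an EXISTS clause in
a query and fetch the result as a scalar boolean. A self-contained sketch
(in-memory SQLite; this EventChannel model is a stand-in, not the original):

    import sqlalchemy as sa
    from sqlalchemy import exists
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import sessionmaker

    Base = declarative_base()

    class EventChannel(Base):
        __tablename__ = 'event_channel'
        id = sa.Column(sa.Integer, primary_key=True)

    engine = sa.create_engine('sqlite://')
    Base.metadata.create_all(engine)
    session = sessionmaker(bind=engine)()

    session.add(EventChannel(id=7))
    session.commit()

    print(session.query(exists().where(EventChannel.id == 7)).scalar())   # True
    print(session.query(exists().where(EventChannel.id == 99)).scalar())  # False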
Code example #59
0
File: legacy.py Project: zooba/warehouse
def file_upload(request):
    # Before we do anything, if there isn't an authenticated user with this
    # request, then we'll go ahead and bomb out.
    if request.authenticated_userid is None:
        raise _exc_with_message(
            HTTPForbidden,
            "Invalid or non-existent authentication information.",
        )

    # distutils "helpfully" substitutes unknown, but "required" values with the
    # string "UNKNOWN". This is basically never what anyone actually wants so
    # we'll just go ahead and delete anything whose value is UNKNOWN.
    for key in list(request.POST):
        if request.POST.get(key) == "UNKNOWN":
            del request.POST[key]

    # We require protocol_version 1; it's the only supported version, and
    # passing a different version should raise an error.
    if request.POST.get("protocol_version", "1") != "1":
        raise _exc_with_message(HTTPBadRequest, "Unknown protocol version.")

    # Look up all of the valid classifiers
    all_classifiers = request.db.query(Classifier).all()

    # Validate and process the incoming metadata.
    form = MetadataForm(request.POST)
    form.classifiers.choices = [
        (c.classifier, c.classifier) for c in all_classifiers
    ]
    if not form.validate():
        for field_name in _error_message_order:
            if field_name in form.errors:
                break
        else:
            field_name = sorted(form.errors.keys())[0]

        raise _exc_with_message(
            HTTPBadRequest,
            "{field}: {msgs[0]}".format(
                field=field_name,
                msgs=form.errors[field_name],
            ),
        )

    # Ensure that we have file data in the request.
    if "content" not in request.POST:
        raise _exc_with_message(
            HTTPBadRequest,
            "Upload payload does not have a file.",
        )

    # Look up the project first before doing anything else; this is so we can
    # automatically register it if we need to, and check permissions before
    # going any further.
    try:
        project = (
            request.db.query(Project)
                      .filter(
                          Project.normalized_name ==
                          func.normalize_pep426_name(form.name.data)).one()
        )
    except NoResultFound:
        # Before we create the project, we're going to check our blacklist to
        # see if this project is even allowed to be registered. If it is not,
        # then we're going to deny the request to create this project.
        if request.db.query(exists().where(
                BlacklistedProject.name ==
                func.normalize_pep426_name(form.name.data))).scalar():
            raise _exc_with_message(
                HTTPBadRequest,
                ("The name {!r} is not allowed. "
                 "See https://pypi.org/help/#project-name "
                 "for more information.")
                .format(form.name.data),
            ) from None

        # Also check for collisions with Python Standard Library modules.
        if (packaging.utils.canonicalize_name(form.name.data) in
                STDLIB_PROHIBITTED):
            raise _exc_with_message(
                HTTPBadRequest,
                ("The name {!r} is not allowed (conflict with Python "
                 "Standard Library module name). See "
                 "https://pypi.org/help/#project-name for more information.")
                .format(form.name.data),
            ) from None

        # The project doesn't exist in our database, so we'll add it along with
        # a role setting the current user as the "Owner" of the project.
        project = Project(name=form.name.data)
        request.db.add(project)
        request.db.add(
            Role(user=request.user, project=project, role_name="Owner")
        )
        # TODO: This should be handled by some sort of database trigger or a
        #       SQLAlchemy hook or the like instead of doing it inline in this
        #       view.
        request.db.add(
            JournalEntry(
                name=project.name,
                action="create",
                submitted_by=request.user,
                submitted_from=request.remote_addr,
            ),
        )
        request.db.add(
            JournalEntry(
                name=project.name,
                action="add Owner {}".format(request.user.username),
                submitted_by=request.user,
                submitted_from=request.remote_addr,
            ),
        )

    # Check that the user has permission to do things to this project; if
    # this is a new project, this will act as a sanity check for the role we
    # just added above.
    if not request.has_permission("upload", project):
        raise _exc_with_message(
            HTTPForbidden,
            ("The user '{0}' is not allowed to upload to project '{1}'. "
             "See https://pypi.org/help#project-name for more information.")
            .format(request.user.username, project.name)
        )

    try:
        release = (
            request.db.query(Release)
                      .filter(
                            (Release.project == project) &
                            (Release.version == form.version.data)).one()
        )
    except NoResultFound:
        release = Release(
            project=project,
            _classifiers=[
                c for c in all_classifiers
                if c.classifier in form.classifiers.data
            ],
            _pypi_hidden=False,
            dependencies=list(_construct_dependencies(
                form,
                {
                    "requires": DependencyKind.requires,
                    "provides": DependencyKind.provides,
                    "obsoletes": DependencyKind.obsoletes,
                    "requires_dist": DependencyKind.requires_dist,
                    "provides_dist": DependencyKind.provides_dist,
                    "obsoletes_dist": DependencyKind.obsoletes_dist,
                    "requires_external": DependencyKind.requires_external,
                    "project_urls": DependencyKind.project_url,
                }
            )),
            **{
                k: getattr(form, k).data
                for k in {
                    # This is a list of all the fields in the form that we
                    # should pull off and insert into our new release.
                    "version",
                    "summary", "description", "license",
                    "author", "author_email", "maintainer", "maintainer_email",
                    "keywords", "platform",
                    "home_page", "download_url",
                    "requires_python",
                }
            }
        )
        request.db.add(release)
        # TODO: This should be handled by some sort of database trigger or a
        #       SQLAlchemy hook or the like instead of doing it inline in this
        #       view.
        request.db.add(
            JournalEntry(
                name=release.project.name,
                version=release.version,
                action="new release",
                submitted_by=request.user,
                submitted_from=request.remote_addr,
            ),
        )

    # TODO: We need a better solution to this than to just do it inline inside
    #       this method. Ideally the version field would just be sortable, but
    #       at least this should be some sort of hook or trigger.
    releases = (
        request.db.query(Release)
                  .filter(Release.project == project)
                  .all()
    )
    for i, r in enumerate(sorted(
            releases, key=lambda x: packaging.version.parse(x.version))):
        r._pypi_ordering = i

    # TODO: Again, we should figure out a better solution to doing this than
    #       just inlining this inside this method.
    if project.autohide:
        for r in releases:
            r._pypi_hidden = r != release

    # Pull the filename out of our POST data.
    filename = request.POST["content"].filename

    # Make sure that the filename does not contain any path separators.
    if "/" in filename or "\\" in filename:
        raise _exc_with_message(
            HTTPBadRequest,
            "Cannot upload a file with '/' or '\\' in the name.",
        )

    # Make sure the filename ends with an allowed extension.
    if _dist_file_regexes[project.allow_legacy_files].search(filename) is None:
        raise _exc_with_message(
            HTTPBadRequest,
            "Invalid file extension. PEP 527 requires one of: .egg, .tar.gz, "
            ".whl, .zip (https://www.python.org/dev/peps/pep-0527/)."
        )

    # Make sure that our filename matches the project that it is being uploaded
    # to.
    prefix = pkg_resources.safe_name(project.name).lower()
    if not pkg_resources.safe_name(filename).lower().startswith(prefix):
        raise _exc_with_message(
            HTTPBadRequest,
            "The filename for {!r} must start with {!r}.".format(
                project.name,
                prefix,
            )
        )

    # Check the content type of what is being uploaded
    if (not request.POST["content"].type or
            request.POST["content"].type.startswith("image/")):
        raise _exc_with_message(HTTPBadRequest, "Invalid distribution file.")

    # Ensure that the package filetype is allowed.
    # TODO: Once PEP 527 is completely implemented we should be able to delete
    #       this and just move it into the form itself.
    if (not project.allow_legacy_files and
            form.filetype.data not in {"sdist", "bdist_wheel", "bdist_egg"}):
        raise _exc_with_message(HTTPBadRequest, "Unknown type of file.")

    # The project may define its own upload size limit; the effective limit
    # is the larger of the project-specific limit and our global file size
    # limit.
    file_size_limit = max(filter(None, [MAX_FILESIZE, project.upload_limit]))

    with tempfile.TemporaryDirectory() as tmpdir:
        temporary_filename = os.path.join(tmpdir, filename)

        # Buffer the entire file onto disk, checking the hash of the file as we
        # go along.
        with open(temporary_filename, "wb") as fp:
            file_size = 0
            file_hashes = {
                "md5": hashlib.md5(),
                "sha256": hashlib.sha256(),
                "blake2_256": hashlib.blake2b(digest_size=256 // 8),
            }
            for chunk in iter(
                    lambda: request.POST["content"].file.read(8096), b""):
                file_size += len(chunk)
                if file_size > file_size_limit:
                    raise _exc_with_message(HTTPBadRequest, "File too large.")
                fp.write(chunk)
                for hasher in file_hashes.values():
                    hasher.update(chunk)

        # Take our hash functions and compute the final hashes for them now.
        file_hashes = {
            k: h.hexdigest().lower()
            for k, h in file_hashes.items()
        }

        # Actually verify the digests that we've gotten. We're going to use
        # hmac.compare_digest even though we probably don't actually need to
        # because it's better safe than sorry. In the case of multiple digests
        # we expect them all to be given.
        if not all([
            hmac.compare_digest(
                getattr(form, "{}_digest".format(digest_name)).data.lower(),
                digest_value,
            )
            for digest_name, digest_value in file_hashes.items()
            if getattr(form, "{}_digest".format(digest_name)).data
        ]):
            raise _exc_with_message(
                HTTPBadRequest,
                "The digest supplied does not match a digest calculated "
                "from the uploaded file."
            )

        # Check to see if the file that was uploaded exists already or not.
        is_duplicate = _is_duplicate_file(request.db, filename, file_hashes)
        if is_duplicate:
            return Response()
        elif is_duplicate is not None:
            raise _exc_with_message(HTTPBadRequest, "File already exists.")

        # Check to see if the file that was uploaded exists in our filename log
        if (request.db.query(
                request.db.query(Filename)
                          .filter(Filename.filename == filename)
                          .exists()).scalar()):
            raise _exc_with_message(
                HTTPBadRequest,
                "This filename has previously been used, you should use a "
                "different version.",
            )

        # Check to see if uploading this file would create a duplicate sdist
        # for the current release.
        if (form.filetype.data == "sdist" and
                request.db.query(
                    request.db.query(File)
                              .filter((File.release == release) &
                                      (File.packagetype == "sdist"))
                              .exists()).scalar()):
            raise _exc_with_message(
                HTTPBadRequest,
                "Only one sdist may be uploaded per release.",
            )

        # Check the file to make sure it is a valid distribution file.
        if not _is_valid_dist_file(temporary_filename, form.filetype.data):
            raise _exc_with_message(
                HTTPBadRequest,
                "Invalid distribution file.",
            )

        # Check that if it's a binary wheel, it's on a supported platform
        if filename.endswith(".whl"):
            wheel_info = _wheel_file_re.match(filename)
            plats = wheel_info.group("plat").split(".")
            for plat in plats:
                if not _valid_platform_tag(plat):
                    raise _exc_with_message(
                        HTTPBadRequest,
                        "Binary wheel '{filename}' has an unsupported "
                        "platform tag '{plat}'."
                        .format(filename=filename, plat=plat)
                    )

        # Also buffer the entire signature file to disk.
        if "gpg_signature" in request.POST:
            has_signature = True
            with open(os.path.join(tmpdir, filename + ".asc"), "wb") as fp:
                signature_size = 0
                for chunk in iter(
                        lambda: request.POST["gpg_signature"].file.read(8096),
                        b""):
                    signature_size += len(chunk)
                    if signature_size > MAX_SIGSIZE:
                        raise _exc_with_message(
                            HTTPBadRequest,
                            "Signature too large.",
                        )
                    fp.write(chunk)

            # Check whether signature is ASCII armored
            with open(os.path.join(tmpdir, filename + ".asc"), "rb") as fp:
                if not fp.read().startswith(b"-----BEGIN PGP SIGNATURE-----"):
                    raise _exc_with_message(
                        HTTPBadRequest,
                        "PGP signature is not ASCII armored.",
                    )
        else:
            has_signature = False

        # TODO: This should be handled by some sort of database trigger or a
        #       SQLAlchemy hook or the like instead of doing it inline in this
        #       view.
        request.db.add(Filename(filename=filename))

        # Store the information about the file in the database.
        file_ = File(
            release=release,
            filename=filename,
            python_version=form.pyversion.data,
            packagetype=form.filetype.data,
            comment_text=form.comment.data,
            size=file_size,
            has_signature=bool(has_signature),
            md5_digest=file_hashes["md5"],
            sha256_digest=file_hashes["sha256"],
            blake2_256_digest=file_hashes["blake2_256"],
            # Figure out what our filepath is going to be; we're going to use
            # a directory structure based on the hash of the file contents.
            # This will ensure that the contents of the file cannot change
            # without also changing the path that the file is saved to.
            path="/".join([
                file_hashes[PATH_HASHER][:2],
                file_hashes[PATH_HASHER][2:4],
                file_hashes[PATH_HASHER][4:],
                filename,
            ]),
        )
        request.db.add(file_)

        # TODO: This should be handled by some sort of database trigger or a
        #       SQLAlchemy hook or the like instead of doing it inline in this
        #       view.
        request.db.add(
            JournalEntry(
                name=release.project.name,
                version=release.version,
                action="add {python_version} file {filename}".format(
                    python_version=file_.python_version,
                    filename=file_.filename,
                ),
                submitted_by=request.user,
                submitted_from=request.remote_addr,
            ),
        )

        # TODO: We need a better answer for how to make this transactional,
        #       so that it won't take effect until after a commit has
        #       happened; for now we'll just ignore that and save it before
        #       the transaction is committed.
        storage = request.find_service(IFileStorage)
        storage.store(
            file_.path,
            os.path.join(tmpdir, filename),
            meta={
                "project": file_.release.project.normalized_name,
                "version": file_.release.version,
                "package-type": file_.packagetype,
                "python-version": file_.python_version,
            },
        )
        if has_signature:
            storage.store(
                file_.pgp_path,
                os.path.join(tmpdir, filename + ".asc"),
                meta={
                    "project": file_.release.project.normalized_name,
                    "version": file_.release.version,
                    "package-type": file_.packagetype,
                    "python-version": file_.python_version,
                },
            )

        # TODO: Once we no longer have the legacy code base running PyPI, we
        #       can go ahead and delete this tiny bit of shim code, since it
        #       only exists to purge stuff on legacy PyPI when files are
        #       uploaded to Warehouse.
        old_domain = request.registry.settings.get("warehouse.legacy_domain")
        if old_domain:
            request.tm.get().addAfterCommitHook(
                _legacy_purge,
                args=["https://{}/pypi".format(old_domain)],
                kws={"data": {":action": "purge", "project": project.name}},
            )

    return Response()
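
The hash-while-buffering and digest-verification steps in the middle of this
view generalize into a small helper; a sketch under the same assumptions
(md5/sha256/blake2_256 digests, lowercase hex strings, constant-time
comparison):

    import hashlib
    import hmac

    def digests_match(path, claimed):
        # `claimed` maps digest names to client-supplied lowercase hex
        # strings; names we don't compute are simply ignored.
        hashes = {
            'md5': hashlib.md5(),
            'sha256': hashlib.sha256(),
            'blake2_256': hashlib.blake2b(digest_size=256 // 8),
        }
        with open(path, 'rb') as fp:
            for chunk in iter(lambda: fp.read(8192), b''):
                for hasher in hashes.values():
                    hasher.update(chunk)
        return all(
            hmac.compare_digest(hashes[name].hexdigest().lower(), value)
            for name, value in claimed.items()
            if name in hashes
        )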
Code example #60
-1
File: tags.py Project: rr-/szurubooru
    def merge_relations(
            table: model.Base, source_tag_id: int, target_tag_id: int) -> None:
        alias1 = table
        alias2 = sa.orm.util.aliased(table)
        update_stmt = (
            sa.sql.expression.update(alias1)
            .where(alias1.parent_id == source_tag_id)
            .where(alias1.child_id != target_tag_id)
            .where(
                ~sa.exists()
                .where(alias2.child_id == alias1.child_id)
                .where(alias2.parent_id == target_tag_id))
            .values(parent_id=target_tag_id))
        db.session.execute(update_stmt)

        update_stmt = (
            sa.sql.expression.update(alias1)
            .where(alias1.child_id == source_tag_id)
            .where(alias1.parent_id != target_tag_id)
            .where(
                ~sa.exists()
                .where(alias2.parent_id == alias1.parent_id)
                .where(alias2.child_id == target_tag_id))
            .values(child_id=target_tag_id))
        db.session.execute(update_stmt)
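
The ~sa.exists() guard in both UPDATE statements keeps the retargeting from
creating a (parent, child) pair that already points at the target tag. The
same guard in isolation, as a core-only sketch (the table name and ids are
invented for illustration):

    import sqlalchemy as sa

    relation = sa.table(
        'tag_relation', sa.column('parent_id'), sa.column('child_id'))
    other = relation.alias()
    source_tag_id, target_tag_id = 1, 2

    stmt = (
        sa.update(relation)
        .where(relation.c.parent_id == source_tag_id)
        .where(relation.c.child_id != target_tag_id)
        .where(
            ~sa.exists()
            .where(other.c.child_id == relation.c.child_id)
            .where(other.c.parent_id == target_tag_id))
        .values(parent_id=target_tag_id))
    print(stmt)  # renders UPDATE tag_relation ... WHERE ... NOT (EXISTS (...))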