Ejemplo n.º 1
0
 def __eq__(self, other):
     """Build a SQL clause comparing this relationship property to *other*.

     Three cases:
     * ``other is None`` -- for one-to-many / many-to-many, emit
       ``NOT EXISTS`` over the primary join; otherwise delegate to the
       property's optimized NULL comparison.
     * the property is a collection (``uselist``) -- *other* must be an
       iterable; emit one EXISTS clause per element, ANDed together.
     * scalar relationship -- delegate to the optimized compare.
     """
     if other is None:
         if self.prop.direction in [ONETOMANY, MANYTOMANY]:
             # "No related rows exist" expressed as NOT EXISTS.
             return ~sql.exists([1], self.prop.primaryjoin)
         else:
             return self.prop._optimized_compare(None)
     elif self.prop.uselist:
         if not hasattr(other, "__iter__"):
             raise exceptions.InvalidRequestError(
                 "Can only compare a collection to an iterable object.  Use contains()."
             )
         else:
             # Combine primary (and secondary, if present) join conditions.
             j = self.prop.primaryjoin
             if self.prop.secondaryjoin:
                 j = j & self.prop.secondaryjoin
             clauses = []
             for o in other:
                 # EXISTS matching this specific instance by primary key.
                 clauses.append(
                     sql.exists(
                         [1],
                         j
                         & sql.and_(
                             *[
                                 x == y
                                 for (x, y) in zip(
                                     self.prop.mapper.primary_key, self.prop.mapper.primary_key_from_instance(o)
                                 )
                             ]
                         ),
                     )
                 )
             return sql.and_(*clauses)
     else:
         return self.prop._optimized_compare(other)
Ejemplo n.º 2
0
def member_join_post(db):
    """Process a member self-registration form submission.

    Re-renders the join page on validation failure or when the chosen
    username/email is already taken; otherwise persists the new member
    and redirects to the login page.
    """
    node_choices = [(node.id, node.node_name) for node in db.query(models.SlcNode)]
    form = forms.member_join_form(node_choices)
    if not form.validates(source=request.params):
        return render("join", form=form)

    username_taken = db.query(
        exists().where(models.SlcMember.member_name == form.d.username)).scalar()
    if username_taken:
        return render("join", form=form, msg=u"用户{0}已被使用".format(form.d.username))

    email_taken = db.query(
        exists().where(models.SlcMember.email == form.d.email)).scalar()
    if email_taken:
        return render("join", form=form, msg=u"用户邮箱{0}已被使用".format(form.d.email))

    member = models.SlcMember()
    member.node_id = form.d.node_id
    member.member_name = form.d.username
    # NOTE: MD5 is what the existing schema stores; kept for compatibility.
    member.password = md5(form.d.password.encode()).hexdigest()
    member.age = int(form.d.age)
    # Remaining fields are copied from the form verbatim.
    for attr in ("realname", "idcard", "sex", "email", "mobile", "address"):
        setattr(member, attr, getattr(form.d, attr))
    member.create_time = utils.get_currtime()
    member.update_time = utils.get_currtime()
    db.add(member)
    db.commit()

    logger.info(u"新用户注册成功,member_name=%s" % member.member_name)
    redirect('/login')
Ejemplo n.º 3
0
    def _create_debile_binaries(self, session, source, pkg):
        """Create Binary (and Deb) rows for the arches *pkg* installed.

        For each installed arch without an existing Binary row, try to
        attach the binary to the build Job that produced it, then record
        the .deb files and emit an "accept" event for the new binary.
        """
        arch_all = session.query(Arch).filter(Arch.name == "all").one()
        arches = session.query(Arch).filter(Arch.name.in_(pkg.installed_archs)).all()

        if arch_all in source.arches and arch_all not in arches and source.affinity in arches:
            if not session.query(
                exists().where((Job.source == source) & (Job.arch == arch_all) & Job.check.has(Check.build == True))
            ).scalar():
                # We have the arch:affinity binary but is still lacking the arch:all binary
                # Make sure debile builds the arch:all binary separately
                check = session.query(Check).filter(Check.build == True).one()
                job = Job(check=check, arch=arch_all, source=source, binary=None)
                session.add(job)

        for arch in arches:
            # Skip arches whose binary row already exists.
            if session.query(exists().where((Binary.source == source) & (Binary.arch == arch))).scalar():
                continue

            # Find the job for this binary
            job = (
                session.query(Job)
                .join(Job.check)
                .filter(Job.source == source, Job.arch == arch, Check.build == True)
                .first()
            )

            if not job and arch == arch_all and source.affinity in arches:
                # The arch:all binary might have been created by the arch:affinity build job.
                job = (
                    session.query(Job)
                    .join(Job.check)
                    .filter(Job.source == source, Job.arch == source.affinity, Check.build == True)
                    .first()
                )

            if job and (not job.finished_at or job.failed is True):
                # Dak accepted a binary upload that debile-master didn't ask for
                if arch != arch_all and not any(job.built_binaries):
                    session.delete(job)
                job = None

            if job:
                binary = job.new_binary(arch)
            else:
                binary = Binary(source=source, arch=arch, uploaded_at=datetime.utcnow())
            session.add(binary)

            # NOTE(review): this inner loop rebinds the outer loop variable
            # ``arch`` to the name *string* from pkg.binaries; the comparison
            # against binary.arch.name relies on that, but the shadowing is
            # fragile -- confirm intent before refactoring.
            for name, arch, filename in pkg.binaries:
                if arch == binary.arch.name:
                    directory, _, filename = filename.rpartition("/")
                    deb = Deb(binary=binary, directory=directory, filename=filename)
                    session.add(deb)

            print("Created binary for %s %s on %s" % (binary.name, binary.version, binary.arch))
            emit("accept", "binary", binary.debilize())
Ejemplo n.º 4
0
def q_where_exists():
    """Demonstrate EXISTS subqueries with two user-name lookups.

    Prints the names of users owning at least one image, then the names of
    users literally called "not exists" -- or the string "not exists" when
    no such user is found.
    """
    from sqlalchemy.sql import exists

    # Users that own at least one image.
    stmt = exists().where(Image.owner_id == User.id)
    for (name,) in session.query(User.name).filter(stmt):
        print(name)

    # BUG FIX: the original used for/else here, but a for loop's else clause
    # runs whenever the loop finishes *without break*, so "not exists" was
    # printed even when rows were found.  Track matches explicitly instead.
    stmt = exists().where(User.name == "not exists")
    matched = False
    for (name,) in session.query(User.name).filter(stmt):
        matched = True
        print(name)
    if not matched:
        print("not exists")
Ejemplo n.º 5
0
def _get_aggregate_dim_for_interim(stateCode=None, districtId=None, schoolId=None, asmtYear=None, tenant=None, subject_key=None, subject=None, is_public=False):
    '''
    Query for institution or grades that have asmts for the year provided
    :param string stateCode
    :param string districtId
    :param string schoolId
    :param string asmtType
    :param string asmtYear
    :param string tenant: tenant info for database connection
    :rtype: rset
    :returns: set of records with district guids
    '''
    def create_where_clause(fact_table, asmt):
        # Restrict to current ('C') records for the requested year, state,
        # subject and assessment type, joined to the institution hierarchy.
        where = and_(fact_table.c.asmt_year == asmtYear, fact_table.c.state_code == stateCode, fact_table.c.rec_status == 'C',
                     fact_table.c.asmt_type == asmt, fact_table.c.inst_hier_rec_id == dim_inst_hier.c.inst_hier_rec_id,
                     fact_table.c.asmt_subject == subject)
        return where
    rows = {}
    with ReportingDbConnection(tenant=tenant, state_code=stateCode, is_public=is_public) as connector:
        # query custom metadata by state code
        dim_inst_hier = connector.get_table(Constants.DIM_INST_HIER)
        fact_asmt_outcome = connector.get_table(Constants.FACT_ASMT_OUTCOME_VW)
        fact_block_asmt_outcome = connector.get_table(Constants.FACT_BLOCK_ASMT_OUTCOME)
        # EXISTS guards for the two interim assessment fact tables.
        s_fao = exists(['*'], from_obj=[dim_inst_hier]).where(create_where_clause(fact_asmt_outcome, AssessmentType.INTERIM_COMPREHENSIVE))
        s_fbao = exists(['*'], from_obj=[dim_inst_hier]).where(create_where_clause(fact_block_asmt_outcome, AssessmentType.INTERIM_ASSESSMENT_BLOCKS))
        if districtId is None and schoolId is None:
            # State level: union of the two assessment-type views.
            query_fao = get_select_for_state_view(dim_inst_hier, stateCode).where(s_fao)
            query_fbao = get_select_for_state_view(dim_inst_hier, stateCode).where(s_fbao)
            query = query_fao.union(query_fbao)
        elif districtId is not None and schoolId is not None:
            # School level: query the fact tables directly.
            fao_query = get_select_for_school_view(fact_asmt_outcome, stateCode, districtId, schoolId, asmtYear, AssessmentType.INTERIM_COMPREHENSIVE, subject)
            fbao_query = get_select_for_school_view(fact_block_asmt_outcome, stateCode, districtId, schoolId, asmtYear, AssessmentType.INTERIM_ASSESSMENT_BLOCKS, subject)
            query = fao_query.union(fbao_query)
        else:
            # District level.
            query_fao = get_select_for_district_view(dim_inst_hier, stateCode, districtId).where(s_fao)
            query_fbao = get_select_for_district_view(dim_inst_hier, stateCode, districtId).where(s_fbao)
            query = query_fao.union(query_fbao)
        results = connector.get_result(query)
        # Shape each row into the {ID, NAME, PARAMS, RESULTS} record keyed by id.
        for result in results:
            params = {Constants.ID: result.get(Constants.ID), Constants.STATECODE: stateCode}
            if districtId is not None:
                params[Constants.DISTRICTGUID] = districtId
            if schoolId is not None:
                params[Constants.SCHOOLGUID] = schoolId
            data = {Constants.ID: result.get(Constants.ID),
                    Constants.ROWID: result.get(Constants.ID),
                    Constants.NAME: result.get(Constants.NAME),
                    Constants.PARAMS: params,
                    Constants.RESULTS: {}
                    }
            data[Constants.RESULTS][subject_key] = {Constants.ASMT_SUBJECT: subject, Constants.TOTAL: -1, Constants.HASINTERIM: True}
            rows[data[Constants.ID]] = data
    return rows
Ejemplo n.º 6
0
 def any(self, clause):
     """Build a predicate: does any user satisfying *clause* belong here?

     Normal groups match when a member satisfying the clause EXISTS;
     inverted groups match when NO excluded user satisfies it.
     """
     member_matches = exists(
         [1],
         from_obj=UserGroup.__table__.join(User.__table__),
         whereclause=and_(UserGroup.group_id == Group.group_id, clause))
     excluded_matches = exists(
         [1],
         from_obj=ExcludedUserGroup.__table__.join(User.__table__),
         whereclause=and_(ExcludedUserGroup.group_id == Group.group_id, clause))
     normal_case = and_(Group.membership_type != GroupMembershipType.inverted,
                        member_matches)
     inverted_case = and_(Group.membership_type == GroupMembershipType.inverted,
                          not_(excluded_matches))
     return or_(normal_case, inverted_case)
Ejemplo n.º 7
0
    def load(self, skip_if_final=True):
        """Fetch all pertinent XML data, parse, and load into the database.

        Args:
            skip_if_final: Skip the download and parsing if the game exists in
                the database with a status of 'Final'? Defaults to True. If
                False, force refresh of all data.
        """
        if skip_if_final:
            loaded = self.session.query(exists().where(
                (Game.game_id == self.game_id) & (Game.status == 'Final'))).scalar()
            if loaded:
                logging.info('{} exists with status = "Final". Skipping'.format(
                    self.game_id))
                # Be sure to close the session if we exit early!
                # TODO: Create the session in this method and wrap in a context
                # manager?
                self.session.close()
                return
        self.fetch_all()
        self.parse_all()
        # Poor-man's upsert. Try adding all objects. If there's a primary key
        # conflict, merge instead.
        try:
            self.session.add_all(self.to_load)
            # The integrity error won't be thrown until a commit
            self.session.commit()
        except exc.IntegrityError:
            self.session.rollback()
            # FIX: use a plain loop -- the original ran a list comprehension
            # purely for its side effects, building a throwaway list.
            for obj in self.to_load:
                self.session.merge(obj)
            self.session.commit()
        self.session.close()
Ejemplo n.º 8
0
    def test_conditional_update_force_order(self):
        """conditional_update(order=...) must hand values to update() in that order."""
        volume = self._create_volume()

        # Filter that is true when the volume has at least one snapshot.
        has_snapshot_filter = sql.exists().where(models.Snapshot.volume_id == models.Volume.id)

        # CASE expression whose value depends on the snapshot filter.
        case_values = volume.Case([(has_snapshot_filter, "has-snapshot")], else_="no-snapshot")

        values = {"status": "deleting", "previous_status": volume.model.status, "migration_status": case_values}

        # Only 'status' is forced to come first; the rest follow.
        order = ["status"]

        with mock.patch("cinder.db.sqlalchemy.api.model_query") as model_query:
            update = model_query.return_value.filter.return_value.update
            update.return_value = 0
            self.assertFalse(volume.conditional_update(values, {"status": "available"}, order=order))

        # We check that we are passing values to update to SQLAlchemy in the
        # right order
        self.assertEqual(1, update.call_count)
        self.assertListEqual(
            [("status", "deleting"), ("previous_status", volume.model.status), ("migration_status", mock.ANY)],
            list(update.call_args[0][0]),
        )
        self.assertDictEqual(
            {"synchronize_session": False, "update_args": {"preserve_parameter_order": True}}, update.call_args[1]
        )
Ejemplo n.º 9
0
def add_addresses(session):
    """ Add an AddressAssignment record for every PrimaryNameAssociation """
    # Primary names that have an IP but no address assignment yet.
    q = session.query(PrimaryNameAssociation)
    q = q.join(System, DnsDomain)
    q = q.filter(System.ip != None)
    q = q.filter(~exists().where(AddressAssignment.ip == System.ip))
    # Eager-load the relations touched below to avoid per-row queries.
    q = q.options(contains_eager('dns_record'))
    q = q.options(contains_eager('dns_record.dns_domain'))
    q = q.options(subqueryload_all('hardware_entity.interfaces.vlans.assignments'))
    q = q.options(subqueryload_all('hardware_entity.interfaces._vlan_ids'))

    count = 0
    pnas = q.all()
    for pna in pnas:
        hw = pna.hardware_entity
        # Only handle the unambiguous single-interface case.
        if len(hw.interfaces) != 1:
            print "{0} has an unexpected number of interfaces, skipping: " \
                    "{1}".format(hw, len(hw.interfaces))
            continue
        iface = hw.interfaces[0]
        # Skip interfaces that already carry addresses.
        if len(iface.vlans[0].addresses):
            print "{0} already has addresses, skipping".format(iface)
            continue
        #print "Adding AddressAssignment record for {0:l}".format(hw)
        iface.vlans[0].addresses.append(pna.dns_record.ip)
        count += 1

    session.flush()
    print "Added %d AddressAssignment records" % count
Ejemplo n.º 10
0
    def test_conditional_update_force_order(self):
        """conditional_update(order=...) must hand values to update() in that order."""
        volume = self._create_volume()

        # Filter that is true when the volume has at least one snapshot.
        has_snapshot_filter = sql.exists().where(
            models.Snapshot.volume_id == models.Volume.id)

        # CASE expression whose value depends on the snapshot filter.
        case_values = volume.Case([(has_snapshot_filter, 'has-snapshot')],
                                  else_='no-snapshot')

        values = {'status': 'deleting',
                  'previous_status': volume.model.status,
                  'migration_status': case_values}

        # Only 'status' is forced to come first; the rest follow.
        order = ['status']

        with mock.patch('cinder.db.sqlalchemy.api.model_query') as model_query:
            update = model_query.return_value.filter.return_value.update
            update.return_value = 0
            self.assertFalse(volume.conditional_update(
                values, {'status': 'available'}, order=order))

        # We check that we are passing values to update to SQLAlchemy in the
        # right order
        self.assertEqual(1, update.call_count)
        self.assertListEqual(
            [('status', 'deleting'),
             ('previous_status', volume.model.status),
             ('migration_status', mock.ANY)],
            list(update.call_args[0][0]))
        self.assertDictEqual(
            {'synchronize_session': False,
             'update_args': {'preserve_parameter_order': True}},
            update.call_args[1])
Ejemplo n.º 11
0
 def auto_schedule_networks(self, plugin, context, host):
     """Schedule non-hosted networks to the DHCP agent on
     the specified host.

     Returns False when no usable agent or no unhosted network is found,
     True after bindings have been added.
     """
     with context.session.begin(subtransactions=True):
         # Find the single enabled DHCP agent on this host.
         query = context.session.query(agents_db.Agent)
         query = query.filter(agents_db.Agent.agent_type ==
                              constants.AGENT_TYPE_DHCP,
                              agents_db.Agent.host == host,
                              agents_db.Agent.admin_state_up == True)
         try:
             dhcp_agent = query.one()
         except (exc.MultipleResultsFound, exc.NoResultFound):
             LOG.warn(_('No enabled DHCP agent on host %s'),
                      host)
             return False
         # A stale heartbeat only warns; scheduling still proceeds.
         if agents_db.AgentDbMixin.is_agent_down(
             dhcp_agent.heartbeat_timestamp):
             LOG.warn(_('DHCP agent %s is not active'), dhcp_agent.id)
         #TODO(gongysh) consider the disabled agent's network
         # Networks that have no DHCP agent binding at all.
         net_stmt = ~exists().where(
             models_v2.Network.id ==
             agentschedulers_db.NetworkDhcpAgentBinding.network_id)
         net_ids = context.session.query(
             models_v2.Network.id).filter(net_stmt).all()
         if not net_ids:
             LOG.debug(_('No non-hosted networks'))
             return False
         # Bind every unhosted network to the agent found above.
         for net_id in net_ids:
             binding = agentschedulers_db.NetworkDhcpAgentBinding()
             binding.dhcp_agent = dhcp_agent
             binding.network_id = net_id[0]
             context.session.add(binding)
     return True
Ejemplo n.º 12
0
Archivo: sql.py Proyecto: hoangt/ms3
    def add_score(self, mod, mem, full, score):
        """Record *score* for the (mod, mem) pair in the cache and database.

        The database insert is guarded by NOT EXISTS so a score already
        stored under the same hash is not duplicated.  Always returns True.
        """
        # Cache key combines the model and memory (plus a ':full' marker).
        key = str(mod) + str(mem)
        if full:
            key += ':full'
        score_hash = self.get_hash(key)
        self.scores[score_hash] = score

        # INSERT ... SELECT ... WHERE NOT EXISTS keeps the write idempotent.
        mod_id = self._get_model_id(mod)
        already_stored = exists([scores_table.c.score]).where(
            scores_table.c.score_hash == score_hash
        )
        source_select = select([
            literal(mod_id),
            literal(score_hash),
            literal(score),
        ]).where(~already_stored)
        target_columns = [
            scores_table.c.model_id,
            scores_table.c.score_hash,
            scores_table.c.score,
        ]
        self._execute(scores_table.insert().from_select(target_columns, source_select))
        return True
Ejemplo n.º 13
0
Archivo: sql.py Proyecto: hoangt/ms3
    def _get_trace_id(self, trace):
        """Return the database id for *trace*, inserting the trace if absent."""
        # Get the hash.
        trace_hash = self.get_hash(trace)

        # Check the database.
        stmt = select([traces_table.c.id]).where(
            traces_table.c.trace_hash == trace_hash
        )
        row = self._execute(stmt).first()
        if row:
            return row.id

        # Attempt to insert a new trace.
        # The NOT EXISTS guard makes the insert a no-op if the same hash
        # was stored concurrently.
        compressed = zlib.compress(str(trace))
        stmt = traces_table.insert().from_select([
            traces_table.c.trace_hash,
            traces_table.c.data,
        ], select([
            literal(trace_hash),
            literal(compressed, type_=Binary),
        ]).where(
            ~exists([traces_table.c.id]).where(
                traces_table.c.trace_hash == trace_hash
            )
        ))
        self._execute(stmt)

        # Recurse: the row exists now, so the lookup above returns its id.
        return self._get_trace_id(trace)
Ejemplo n.º 14
0
def add_profile():
    """Render the add-profile form and create a user on a valid POST."""
    form = UserProfileForm()
    if request.method == 'POST' and form.validate_on_submit():
        username = request.form['username'].strip()
        firstname = request.form['firstname'].strip()
        lastname = request.form['lastname'].strip()
        sex = request.form['sex']
        age = request.form['age']
        image = request.files['img']

        # Draw random candidate ids until one is free in the database.
        while True:
            userid = randint(620000000, 620099999)
            taken = db.session.query(
                exists().where(User.userid == str(userid))).scalar()
            if not taken:
                break

        # Prefix the uploaded filename with the user id and store it.
        filename = "{}-{}".format(userid, secure_filename(image.filename))
        filepath = "app/static/uploads/{}".format(filename)
        image.save(filepath)

        user = User(str(userid), username, firstname, lastname, filename,
                    sex, age, datetime.now())
        db.session.add(user)
        db.session.commit()

        flash('User successfully added!', category='success')
        return redirect(url_for('list_profiles'))

    return render_template('add_profile.html', form=form)
Ejemplo n.º 15
0
    def load(self, bed_file, alt_id=None, group_id='unknown', update=False):
        """Populate coverage data into a chanjo database.

        Args:
            bed_file (str): path to "chanjo annotate" BED output
            alt_id (str): sample id to switch to
            group_id (str, optional): group id for added verbosity
            update (bool, optional): replace existing data for the sample
        """
        with codecs.open(bed_file, 'r', encoding='utf-8') as handle:
            # Optionally rewrite sample ids on the fly.
            bed_stream = switch_ids(alt_id, handle) if alt_id else handle

            logger.debug('check if coverage data already exists')
            already_loaded = self.db.query(
                exists().where(Sample.id == alt_id)).scalar()
            if already_loaded:
                logger.warn("Coverage already loaded for %s: %s",
                            group_id, bed_file)
                if not update:
                    return
                logger.debug('remove existing coverage data')
                self._delete_sample(alt_id)

            import_bed(self.db, bed_stream)
Ejemplo n.º 16
0
Archivo: sql.py Proyecto: hoangt/ms3
    def add_cacti_result(self, name, access_time, cycle_time, area):
        """Add a CACTI result.

        Caches the tuple locally and inserts it into the database with a
        NOT EXISTS guard keyed on the name hash.  Always returns True.
        """
        # Keep the in-process cache in sync.
        name_hash = self.get_hash(name)
        self.cacti_results[name_hash] = (access_time, cycle_time, area)

        # INSERT ... SELECT ... WHERE NOT EXISTS so reruns do not duplicate.
        already_present = exists([cacti_results_table.c.name_hash]).where(
            cacti_results_table.c.name_hash == name_hash
        )
        source_select = select([
            literal(name_hash),
            literal(str(name)),
            literal(area),
            literal(access_time),
            literal(cycle_time),
        ]).where(~already_present)
        target_columns = [
            cacti_results_table.c.name_hash,
            cacti_results_table.c.name,
            cacti_results_table.c.area,
            cacti_results_table.c.access_time,
            cacti_results_table.c.cycle_time,
        ]
        self._execute(cacti_results_table.insert().from_select(target_columns, source_select))
        return True
Ejemplo n.º 17
0
    def test_conditional_update_non_iterable_expected_filter(self):
        """A single exists() filter updates only the rows it matches."""
        # Volume we want to change
        volume = self._create_volume()

        # Another volume that has no snapshots
        volume2 = self._create_volume()

        # A volume with snapshots
        volume3 = self._create_volume()
        self._create_snapshot(volume3)

        # Update only it it has no snapshot
        filters = (~sql.exists().where(models.Snapshot.volume_id == models.Volume.id),)

        self.assertTrue(volume.conditional_update({"status": "deleting", "size": 2}, {"status": "available"}, filters))

        # Check that the object in memory has been updated
        self._check_volume(volume, "deleting", 2)

        # Check that the volume in the DB also has been updated
        self._check_volume(volume, "deleting", 2, True)

        # Check that the other volumes in the DB haven't changed
        self._check_volume(volume2, "available", 1, True)
        self._check_volume(volume3, "available", 1, True)
Ejemplo n.º 18
0
def check_shutdown():
    """Exit the process when no job is assigned but still unfinished."""
    with session() as s:
        busy = s.query(exists().where(
            (Job.assigned_at != None) & (Job.finished_at == None))
        ).scalar()
        if not busy:
            raise SystemExit(0)
Ejemplo n.º 19
0
def insert_flow_data(**kwargs):
    """
    Inserts flow cytometry data, read from csv files. Creates a
    flowproject record if one does not already exist for the data to
    be inserted, and then creates a related flowdata record.
    """
    session = Session()
    if 'flowproject_name' in kwargs:
        proj_exists = session.query(exists().where(
            FlowProject.project_name == kwargs['flowproject_name'])).scalar()
        # FIX: idiomatic truthiness test instead of '== False'.
        if not proj_exists:
            # We don't have this project yet, so create it.
            newFlowproj = FlowProject()
            newFlowproj.project_name = kwargs['flowproject_name']
            session.add(newFlowproj)
            logging.info("Adding new flowproject {}.".format(kwargs['flowproject_name']))
        newFlowdata = FlowData(**kwargs)
        session.add(newFlowdata)
        try:
            session.commit()
        except IntegrityError as e:
            # FIX: str(e) works on Python 2 and 3; Exception.message was
            # removed in Python 3 and would raise AttributeError here.
            reason = str(e)
            logging.error("Error committing new FlowProject record: {}".format(reason))
            session.rollback()
    else:
        # should alert the user here too... someday
        logging.error("Function 'insert_flow_data' requires a flowproject_name parameter.")
Ejemplo n.º 20
0
Archivo: sql.py Proyecto: hoangt/ms3
    def add_fpga_result(self, name, frequency, bram_count,
                        lut_count, reg_count):
        """Add an FPGA timing result.

        Caches the tuple locally and inserts it into the database with a
        NOT EXISTS guard keyed on the name hash.  Always returns True.
        """
        # Keep the in-process cache in sync.
        name_hash = self.get_hash(name)
        self.fpga_results[name_hash] = (frequency, bram_count,
                                        lut_count, reg_count)

        # INSERT ... SELECT ... WHERE NOT EXISTS so reruns do not duplicate.
        already_present = exists([fpga_results_table.c.name_hash]).where(
            fpga_results_table.c.name_hash == name_hash
        )
        source_select = select([
            literal(name_hash),
            literal(str(name)),
            literal(frequency),
            literal(bram_count),
            literal(lut_count),
            literal(reg_count),
        ]).where(~already_present)
        target_columns = [
            fpga_results_table.c.name_hash,
            fpga_results_table.c.name,
            fpga_results_table.c.frequency,
            fpga_results_table.c.bram_count,
            fpga_results_table.c.lut_count,
            fpga_results_table.c.reg_count,
        ]
        self._execute(fpga_results_table.insert().from_select(target_columns, source_select))
        return True
Ejemplo n.º 21
0
 def run(self):
     """Crawl every recruit URL from the listing, saving only new ones."""
     for recruit_url in self.get_list():
         # Skip postings already persisted (matched by URL).
         if db.session.query(exists().where(Job.url == recruit_url)).scalar():
             continue
         self.detail_info(recruit_url)
         self.save()
Ejemplo n.º 22
0
def start(session):
    """Run the arithmetic quiz loop and persist the player's best score."""
    print("Dividing numbers rounds them to second decimal!")
    points = 0
    username = input("Enter your playername>")
    user = User(name=username, score=points)
    print("Welcome {}! Let the game begin!".format(username))
    last_answer_correct = True
    while(last_answer_correct):
        expression = Expression.generate_expression()
        # NOTE(review): operands appear to be expression[0] and [1] with the
        # operator at [2] and the expected answer at [3] -- confirm against
        # Expression.generate_expression().
        print("What is the answer to {} {} {}".format(
            expression[0], expression[2], expression[1]))
        answer = input("?>")
        if(float(answer) == expression[3]):
            print("?>Correct!")
            points += 1
        else:
            # Wrong answer ends the game; update the stored score if beaten.
            score = calculate_score(points)
            print("Incorrect! Ending game. You score is: {}".format(score))
            last_answer_correct = False
            if user.score < score:
                user.score = score
                session.query(User).filter(User.name==username).update({"score": score})
    # Insert the user row only when this username was never stored before.
    if(session.query(exists().where(User.name == username)).scalar() == 0):
        session.add(user)
    session.commit()
Ejemplo n.º 23
0
    def test_conditional_update_fail_non_iterable_expected_filter(self):
        """The exists() filter must block the update when it does not match."""
        # Volume we want to change
        volume = self._create_volume()
        self._create_snapshot(volume)

        # A volume that has no snapshots
        volume2 = self._create_volume()

        # Another volume with snapshots
        volume3 = self._create_volume()
        self._create_snapshot(volume3)

        # Update only it it has no snapshot
        filters = (~sql.exists().where(
            models.Snapshot.volume_id == models.Volume.id),)

        self.assertFalse(volume.conditional_update(
            {'status': 'deleting', 'size': 2},
            {'status': 'available'},
            filters))

        # Check that the object in memory hasn't been updated
        self._check_volume(volume, 'available', 1)

        # Check that no volume in the DB also has been updated
        self._check_volume(volume, 'available', 1, True)
        self._check_volume(volume2, 'available', 1, True)
        self._check_volume(volume3, 'available', 1, True)
Ejemplo n.º 24
0
def register():
    """Register a new user account.

    Redirects home when already authenticated; on a duplicate email the
    registration form is re-rendered with a warning flash message.
    """
    if _user_is_authenticated():
        return redirect('/')

    form = RegisterForm()
    if form.validate_on_submit():
        if dbsession.query(
                exists().where(User.email == form.email.data)).scalar():
            # BUG FIX: Markup() accepts no formatting kwargs (the original
            # call raised TypeError) and '%(email)' lacked a conversion
            # character.  Markup.format() both interpolates and HTML-escapes
            # its arguments, keeping the anchor tag itself unescaped.
            warning_markup = Markup(
                'User with email {email} already exists. '
                'Click <a href="{login_link}">here</a> to login.').format(
                    email=form.email.data, login_link=url_for('login'))
            flash(warning_markup, 'warning')
            return render_template('register.html', form=form)
        user = User(name=form.name.data,
                    email=form.email.data,
                    password_hash=bcrypt.encrypt(form.password.data))
        dbsession.add(user)
        dbsession.commit()
        login_user(user, remember=True)
        return redirect('/')
    else:
        write_errors_to_flash(form)

    return render_template('register.html', form=form)
Ejemplo n.º 25
0
    def gets_show_in_ebooking(cls, session, hotel_name=None, city_id=None, merchant_ids=None, is_new=None, start=0, limit=20):
        """Page through valid, complete provider-6 (ebooking) hotel mappings.

        Returns a (records, total) tuple.  When *is_new* is given, a mapping
        also matches if any of its room-type mappings is flagged new.
        """
        query = session.query(HotelMappingModel)\
                .filter(HotelMappingModel.is_delete == 0)\
                .filter(HotelMappingModel.provider_id == 6,
                        HotelMappingModel.status == cls.STATUS.valid_complete)

        if is_new is not None:
            from models.room_type_mapping import RoomTypeMappingModel
            # EXISTS over room-type mappings of this hotel that are new and
            # not deleted.
            stmt = exists().where(and_(HotelMappingModel.provider_id == RoomTypeMappingModel.provider_id,
                        HotelMappingModel.provider_hotel_id == RoomTypeMappingModel.provider_hotel_id,
                        RoomTypeMappingModel.is_new == 1,
                        RoomTypeMappingModel.is_delete == 0))
            query = query.filter(or_(stmt, HotelMappingModel.is_new == is_new))

        # Optional filters.
        if city_id:
            query = query.filter(HotelMappingModel.city_id == city_id)
        if merchant_ids is not None:
            query = query.filter(HotelMappingModel.merchant_id.in_(merchant_ids))
        if hotel_name:
            query = query.filter(HotelMappingModel.provider_hotel_name.like(u'%{}%'.format(hotel_name)))

        r = query.order_by(HotelMappingModel.id.desc()).offset(start).limit(limit).all()
        total = query.count()

        return r, total
Ejemplo n.º 26
0
    def gets_show_in_firstvalid(cls, session, provider_id=None, hotel_name=None, city_id=None, start=0,
                                limit=20, status=-1):
        """Page through non-ebooking hotel mappings pending first validation.

        Returns a (records, total) tuple.  ``status == -1`` means "any
        status"; otherwise results are restricted to that status.
        """
        from models.room_type_mapping import RoomTypeMappingModel

        # EXISTS over room-type mappings of this hotel that await first
        # validation and are not deleted.
        stmt = exists().where(and_(HotelMappingModel.provider_id == RoomTypeMappingModel.provider_id,
                HotelMappingModel.provider_hotel_id == RoomTypeMappingModel.provider_hotel_id,
                RoomTypeMappingModel.status == cls.STATUS.wait_first_valid,
                RoomTypeMappingModel.is_delete == 0))

        query = session.query(HotelMappingModel)
        if provider_id:
            query = query.filter(HotelMappingModel.provider_id == provider_id)
        if city_id:
            query = query.filter(HotelMappingModel.city_id == city_id)
        if hotel_name:
            query = query.filter(HotelMappingModel.provider_hotel_name.like(u'%{}%'.format(hotel_name)))
        if status != -1:
            query = query.filter(HotelMappingModel.status == status)

        # Provider 6 (ebooking) is handled elsewhere; exclude it here.
        query = query.filter(HotelMappingModel.provider_id != 6)\
            .filter(HotelMappingModel.is_delete == 0)\
            .filter(HotelMappingModel.status != HotelMappingModel.STATUS.init)
        # if status == -1:
        query = query.filter(or_(stmt, HotelMappingModel.status != HotelMappingModel.STATUS.init))
        # else:
        #         query = query.filter(and_(stmt))

        r = query.offset(start).limit(limit).all()
        total = query.count()

        return r, total
Ejemplo n.º 27
0
def add_interfaces(session):
    """ Add a default interface for all HW that has an IP """
    q = session.query(HardwareEntity)
    q = q.filter(~exists().where(Interface.hardware_entity_id == HardwareEntity.id))
    q = q.outerjoin(PrimaryNameAssociation, System, DnsDomain)
    q = q.options(contains_eager('_primary_name_asc'))
    q = q.options(contains_eager('_primary_name_asc.dns_record'))
    q = q.options(contains_eager('_primary_name_asc.dns_record.dns_domain'))
    q = q.filter(System.ip != None)

    hws = q.all()
    count = 0
    for hw in hws:
        if hw.hardware_type == "machine":
            interface = "eth0"
            itype = "public"
        elif hw.hardware_type == "switch":
            interface = "xge"
            itype = "oa"
        else:
            interface = "oa"
            itype = "oa"

        #print "Adding default interface for {0:l}".format(hw)

        dbinterface = Interface(hardware_entity=hw, name=interface,
                                interface_type="oa",
                                comments="Created automatically by upgrade script")
        session.add(dbinterface)
        count += 1

    session.flush()
    print "Added %d interfaces" % count
Ejemplo n.º 28
0
Archivo: sql.py Proyecto: hoangt/ms3
    def _get_memory_id(self, mem):
        """Map a memory object to its database id, creating the row on demand.

        Results are memoised in ``self.memories`` keyed by the memory hash.
        """
        key = self.get_hash(mem)
        if key in self.memories:
            return self.memories[key]

        # INSERT ... SELECT ... WHERE NOT EXISTS: conditional insert that is
        # safe when several processes race to register the same memory.
        insert_stmt = memories_table.insert().from_select(
            [memories_table.c.name_hash, memories_table.c.name],
            select([literal(key), literal(mem)]).where(
                ~exists([memories_table.c.id]).where(
                    memories_table.c.name_hash == key)))
        self._execute(insert_stmt)

        # The row is guaranteed to exist now; read back its id and cache it.
        lookup = select([memories_table.c.id]).where(
            memories_table.c.name_hash == key)
        ident = self._execute(lookup).first().id
        self.memories[key] = ident
        return ident
Ejemplo n.º 29
0
def map():
    """Map page: create, update or delete Plant rows on POST, then list all."""
    if request.method == 'POST':
        plant_id = request.form['ide']
        if db.session.query(exists().where(models.Plant.id == plant_id)).scalar():
            matches = db.session.query(models.Plant).filter(
                models.Plant.id == plant_id)
            if request.form.get('delete') == 'delete':
                print("Here!")
                for plant in matches:
                    print(plant)
                    db.session.delete(plant)
                    db.session.commit()
            else:
                # Update name fields of every matching row.
                for plant in matches:
                    print(plant)
                    plant.commonName = request.form['commonName']
                    plant.latinName = request.form['latinName']
                    db.session.commit()
        else:
            # Unknown id: treat the submission as a new plant.
            new_plant = models.Plant(commonName=request.form['commonName'],
                                     latinName=request.form['latinName'],
                                     latitude=request.form['latitude'],
                                     longitude=request.form['longitude'])
            db.session.add(new_plant)
            db.session.commit()
    rows = models.Plant.query.all()
    plants = [dict(id=r.id, commonName=r.commonName, latinName=r.latinName,
                   latitude=r.latitude, longitude=r.longitude) for r in rows]
    return render_template('map.html', plants=plants)
Ejemplo n.º 30
0
def add_application(obj, override=True, temp=False):
    """Persist a PatentApplication object via SQLAlchemy.

    If a record with the same application number already exists it is
    deleted first (when *override* is true) so the fresh data replaces it.
    Dates must be converted to datetime because of SQLite (OK on MySQL).

    MySQL returns table names in foreign-key declarations as all-lower
    case regardless of OS case sensitivity, so table names should be
    declared all-lower-case both in SQLAlchemy and on the MySQL side,
    especially if reflection is used.
    """
    # Does this application number already exist?
    (already, ), = appsession.query(
        exists().where(schema.App_Application.number == obj.application))
    if already:
        if not override:
            return
        # Remove the stale row so we can replace it below.
        existing = appsession.query(schema.App_Application).filter(
            schema.App_Application.number == obj.application)
        appsession.delete(existing.one())
    if len(obj.app["number"]) < 3:
        return

    application = schema.App_Application(**obj.app)
    # lots of abstracts seem to be missing. why?
    add_all_app_fields(obj, application)

    appsession.merge(application)
Ejemplo n.º 31
0
def add_profile_POST():
    """Handle the profile-creation form: validate, save the image, insert row."""
    form = ProfileForm(csrf_enabled=False)
    if request.method == 'POST':
        if form.validate_on_submit():
            f_name = request.form['f_name'].strip()
            l_name = request.form['l_name'].strip()
            gender = request.form['gender']
            age = request.form['age']
            image = request.files['image']
            # NOTE(review): bio is read here but never passed to UserProfile
            # below — confirm whether the constructor should receive it.
            bio = request.form['bio'].strip()
            # Draw random 4-digit ids until one not present in the DB is found.
            while True:
                u_id = randint(0000,9999)
                if not db.session.query(exists().where(UserProfile.u_id == str(u_id))).scalar():
                    break
            filename = secure_filename(image.filename)
            image.save(os.path.join('app/static/uploads', filename))
            created_on = datetime.now()
            user_profile = UserProfile(u_id,f_name,l_name,gender,age,filename,created_on)
            db.session.add(user_profile)
            db.session.commit()
            flash("User Successfully Added", category='success')
            return redirect('/profiles')
    return render_template('profileform.html',form=form)
Ejemplo n.º 32
0
def add_deck_cards():
    """Backfill DeckCard rows for every Deck that has none yet."""
    session = Session()
    amt = 0
    while True:
        # Process decks with no DeckCard rows, in batches of 1000.
        decks = session.query(Deck).filter(~exists().where(
            DeckCard.deck_key == Deck.key)).limit(1000).all()
        if not decks:
            break
        for deck in decks:
            seen = {}
            for card_key in deck.data["_links"]["cards"]:
                if card_key in seen:
                    # Duplicate card in the deck list: bump its count.
                    seen[card_key].count += 1
                else:
                    record = DeckCard(deck_key=deck.key,
                                      card_key=card_key,
                                      card_deck_expansion=deck.expansion,
                                      count=1)
                    session.add(record)
                    seen[card_key] = record
            session.commit()
            amt += 1
            print(deck.key, amt)
 async def addRacer(self, *name):
     """Register a new racer.

     Usage: addRacer <name> [racing_name] [trueskill_name].  Missing
     optional names fall back to the primary name.
     """
     # Bug fix: validate the argument count BEFORE touching name[0]
     # (the original raised IndexError on an empty call, and left `racer`
     # unbound — NameError — when more than three names were given).
     if len(name) == 0 or len(name) > 3:
         await self.bot.say("Bad Arguments")
         return
     if (self.session.query(
             exists().where(Racer.name == name[0])).scalar()):
         await self.bot.say("This racer already exist")
         return
     if len(name) == 1:
         racer = Racer(name=name[0],
                       name_racing=name[0],
                       name_trueskill=name[0])
     elif len(name) == 2:
         racer = Racer(name=name[0],
                       name_racing=name[1],
                       name_trueskill=name[0])
     else:
         racer = Racer(name=name[0],
                       name_racing=name[1],
                       name_trueskill=name[2])
     self.session.add(racer)
     self.session.commit()
     await self.bot.say("{} has been added as a racer".format(racer.name))
Ejemplo n.º 34
0
 def insert_items(self, data: dict) -> None:
     """Queue split-info mappings for every security matching the payload.

     Rows whose (securities_id, tradedate) pair already exists are skipped;
     the rest are appended to ``self.mappings`` for a later bulk insert.
     """
     info = ValidateSplitInfo(**data)
     processor = SecuritiesInfo()
     for sec_id in processor.securities_query_get_id(info.secid):
         already_stored = db.session.query(
             exists().where(
                 and_(
                     SecuritiesSplits.securities_id == sec_id,
                     SecuritiesSplits.tradedate == info.tradedate
                 )
             )
         ).scalar()
         if already_stored:
             continue
         self.items = {
             'securities_id': sec_id,
             'tradedate': info.tradedate,
             'before': info.before,
             'after': info.after
         }
         self.mappings.append(self.items)
Ejemplo n.º 35
0
    def write_dag(cls,
                  dag: DAG,
                  min_update_interval: Optional[int] = None,
                  session=None):
        """Serializes a DAG and writes it into database.

        :param dag: a DAG to be written into database
        :param min_update_interval: minimal interval in seconds to update serialized DAG
        :param session: ORM Session
        """
        # Checks if (Current Time - Time when the DAG was written to DB) < min_update_interval
        # If Yes, does nothing
        # If No or the DAG does not exists, updates / writes Serialized DAG to DB
        if min_update_interval is not None:
            # last_updated newer than (now - interval) means the stored copy
            # is fresh enough; skip the write entirely.
            if session.query(exists().where(
                    and_(cls.dag_id == dag.dag_id,
                         (timezone.utcnow() -
                          timedelta(seconds=min_update_interval)) <
                         cls.last_updated))).scalar():
                return
        log.debug("Writing DAG: %s to the DB", dag.dag_id)
        # merge() upserts: inserts when the DAG is new, updates otherwise.
        session.merge(cls(dag))
        log.debug("DAG: %s written to the DB", dag.dag_id)
Ejemplo n.º 36
0
    def new_food(payload):
        """Create a new Food row from the submitted form.

        *payload* is supplied by the auth decorator (decoded token); it is
        not used directly here but must remain in the signature.
        """
        form = NewFood()
        # Reject duplicates (case-sensitive match on the raw form value).
        if db.session.query(
                exists().where(Food.food == request.form['food'])).scalar():
            flash('Looks like that food is already in our database.')

            return render_template('forms/new_food.html', form=form)

        # (Removed two dead `Food.query.all()` calls from the original —
        # their results were never used and each ran a full table query.)
        new_food = Food(food=request.form['food'].lower(),
                        protein=request.form['protein'],
                        carbs=request.form['carbs'],
                        fat=request.form['fats'],
                        calories=request.form['calories'])

        new_food.insert()

        current_foods = Food.query.all()
        paged_foods = paginate_foods(request, current_foods)
        print(paged_foods)
        flash(request.form['food'] + ' has been successfully added!')
        return render_template('/pages/foods.html', foods=paged_foods)
Ejemplo n.º 37
0
def _semi_join(left, right=None, on=None, *args, sql_on=None):
    """Keep rows of *left* with at least one match in *right* (SQL semi-join)."""
    _raise_if_args(args)

    lhs = left.last_op.alias()
    rhs = right.last_op.alias()

    # Normalise the join specification, then build the join predicate.
    on = _validate_join_arg_on(on, sql_on)
    cond = _create_join_conds(lhs, rhs, on)

    # EXISTS (SELECT 1 FROM rhs WHERE <cond>) ...
    subquery = sql.select([sql.literal(1)],
                          from_obj=rhs,
                          whereclause=cond)

    # ... while keeping only the left-hand select's columns.
    sel = sql.select(lhs.columns,
                     from_obj=lhs,
                     whereclause=sql.exists(subquery))

    return left.append_op(sel, order_by=tuple())
Ejemplo n.º 38
0
def start_date(start):
    """Return min/avg/max temperature stats from *start* onward as JSON.

    Responds 404 with the valid date range when *start* is not a recorded
    measurement date.
    """
    session = Session(engine)
    try:
        # Overall date range, used in the error message on a bad start date.
        min_date = session.query(func.min(Measurement.date))
        min_date_str = min_date.scalar()
        max_date = session.query(func.max(Measurement.date))
        max_date_str = max_date.scalar()
        min_max_average = session.query(func.min(Measurement.tobs), func.avg(Measurement.tobs), func.max(Measurement.tobs)).\
            filter(Measurement.date >= start)

        date_exists = session.query(exists().where(Measurement.date == start)).scalar()
        # temperature dates
        if date_exists:
            temps_list = []
            for temps in min_max_average:
                temps_dict = {}
                temps_dict["Min Temp"] = temps[0]
                temps_dict["Average Temp"] = temps[1]
                temps_dict["Max Temp"] = temps[2]
                temps_list.append(temps_dict)

            return jsonify(temps_list)

        return jsonify({"Error": f"Input Date {start} not valid. Date Range is {min_date_str} to {max_date_str}"}), 404
    finally:
        # Bug fix: the original leaked the session; always release it.
        session.close()
Ejemplo n.º 39
0
def getPublicaciones(uid, u):
    """
    Get the publications corresponding to the user with id: uid.
    u: boolean variable that indicates if the uid correspond to the logged user.
    Check if the user has publications. If not, returns null.
    """
    user = session.query(Usuario).filter(Usuario.id == uid).one()
    if not session.query(exists().where(Publicacion.uid == user.id)).scalar():
        return None
    rows = session.query(Publicacion).filter(Publicacion.uid == uid).order_by(
        Publicacion.fecha.desc()).all()
    resultado = []
    for row in rows:
        resultado.append({
            "img": getImage(uid, True),
            "name": user.nombre + " " + user.apellido,
            "text": row.texto,
            "fecha": row.fecha,
            "username": user.username,
            "user": u,
        })
    return resultado
Ejemplo n.º 40
0
    def process_item(self, item, spider):
        """Insert the scraped developer unless an identical row already exists."""
        entry = {
            "developer_group_id": item["developer_group_id"],
            "developer_group_name": item["developer_group_name"],
            "developer_group_address": item["developer_group_address"]
        }

        # An exact match on all three fields counts as a duplicate.
        duplicate = session_test.query(exists().where(
            and_(
                Developer.developer_group_id == entry['developer_group_id'],
                Developer.developer_group_name == entry['developer_group_name'],
                Developer.developer_group_address == entry['developer_group_address']))).scalar()

        if not duplicate:
            session_test.add(Developer(**entry))
            session_test.commit()

        return item
Ejemplo n.º 41
0
Archivo: mjtt.py Proyecto: leegggg/mjyy
def getAllScores(engine):
    """Scrape a score for every meijutt content page that has none yet.

    Pages are processed one per iteration, picked at random from the
    unscored candidates, until the query returns nothing.  Errors are
    printed and retried on the next pass.

    NOTE(review): a page that fails persistently makes this loop forever;
    consider skip-listing failed links.
    """
    import random
    from sqlalchemy.sql import exists  # hoisted: was re-imported each pass
    # (removed unused `import re` from the original)
    Session = sessionmaker(bind=engine)

    while True:
        try:
            session = Session()
            results = session.query(Page).filter(Page.link.like('https://www.meijutt.com/content/meiju%.html')) \
                .filter(~exists().where(Page.link == Score.source)).order_by('des').limit(2000).all()
            session.commit()
            if len(results) == 0:
                break
            # Random pick so concurrent runs rarely collide on a page.
            index = random.randrange(len(results))
            page: Page = results[index]
            link = page.link
            score = readScore(link)
            session = Session()
            session.merge(score)
            session.commit()
        except Exception as e:
            print("Error download torrent for {}".format(e))
 def add_reservation(self, rsv, mpls_label):
     """
     rsv: dict containing reservation info (keys: src, dst, bw)

     Create a QoSReservation for the src/dst pair, or return the existing
     record when the pair is already reserved.
     """
     exist = self.session.query(
         exists().where(QoSReservation.src == rsv["src"]).where(
             QoSReservation.dst == rsv["dst"])).scalar()
     if not exist:
         # NOTE(review): the original also called get_switch_for_ip() for
         # both endpoints but never used the results; those dead lookups
         # were removed.
         in_port = self.get_port_for_ip(rsv["src"])
         out_port = self.get_port_for_ip(rsv["dst"])
         reservation = QoSReservation(src=rsv["src"],
                                      dst=rsv["dst"],
                                      bw=rsv["bw"],
                                      mpls_label=mpls_label,
                                      in_port=in_port.id,
                                      out_port=out_port.id)
         return self.add_record(reservation)
     else:
         return self.session.query(QoSReservation) \
             .filter(QoSReservation.src == rsv["src"]) \
             .filter(QoSReservation.dst == rsv["dst"]).first()
Ejemplo n.º 43
0
def profile(username=None):
    """Create a user profile on POST, or display an existing one on GET."""
    form = CreateUserForm()
    if request.method == 'POST':
        username = form.username.data
        if db.session.query(
                exists().where(Users.username == username)).scalar():
            flash('Username already in use.', 'danger')
            return render_template('form.html', form=form)
        # Default avatar follows gender; may be replaced by an upload below.
        new_user = Users(first_name=form.firstname.data,
                         last_name=form.lastname.data,
                         username=username,
                         password=form.password.data,
                         age=int(form.age.data),
                         gender=form.gender.data,
                         profile_photo=form.gender.data + '.png',
                         bio=form.bio.data)
        upload = request.files['image']
        if upload and validate_file(upload.filename):
            filename = secure_filename(upload.filename)
            # Store the file under the username, keeping its extension.
            filename = '{0}.{1}'.format(new_user.username,
                                        filename.split('.')[-1])
            upload.save(os.path.join(app.config["UPLOAD_FOLDER"], filename))
            new_user.profile_photo = filename
        db.session.add(new_user)
        db.session.commit()
        # (removed dead `user = None` / post-commit `Users.query.all()`
        # statements from the original — their results were never used)
        return redirect(url_for('profiles'))
    elif username:
        for candidate in Users.query.all():
            if candidate.username == username:
                return render_template('profile.html', user=candidate)
    return render_template('form.html', form=form)
Ejemplo n.º 44
0
def add_application(obj, override=True, temp=False):
    """
    PatentApplication Object converting to tables via SQLAlchemy
    Necessary to convert dates to datetime because of SQLite (OK on MySQL)

    Case Sensitivity and Table Reflection
    MySQL has inconsistent support for case-sensitive identifier names,
    basing support on specific details of the underlying operating system.
    However, it has been observed that no matter what case sensitivity
    behavior is present, the names of tables in foreign key declarations
    are always received from the database as all-lower case, making it
    impossible to accurately reflect a schema where inter-related tables
    use mixed-case identifier names.

    Therefore it is strongly advised that table names be declared as all
    lower case both within SQLAlchemy as well as on the MySQL database
    itself, especially if database reflection features are to be used.
    """
    # NOTE(review): the *temp* parameter is accepted but unused here —
    # confirm whether callers rely on it.

    # if the application exists, remove it so we can replace it
    (app_exists, ), = appsession.query(
        exists().where(schema.App_Application.number == obj.application))
    if app_exists:
        if override:
            # Delete the stale row so the merge below stores fresh data.
            app_query = appsession.query(schema.App_Application).filter(
                schema.App_Application.number == obj.application)
            appsession.delete(app_query.one())
        else:
            return
    # Application numbers shorter than 3 characters are treated as invalid.
    if len(obj.app["number"]) < 3:
        return

    app = schema.App_Application(**obj.app)
    # lots of abstracts seem to be missing. why?
    add_all_app_fields(obj, app)

    appsession.merge(app)
Ejemplo n.º 45
0
def login():
    """Log a user in by email+name, creating the account when it is new.

    Stores the user's id in the module-level ``pid`` as a side effect.
    """
    # Bug fix: the original assigned `pid = None` BEFORE its `global pid`
    # declarations, which is a SyntaxError in Python 3 ("name assigned to
    # before global declaration").  Declare the global once, up front.
    global pid
    next = get_redirect_target()
    pid = None
    if request.method == 'POST':
        email1 = request.form['email']
        name1 = request.form['name']

        if session.query(exists().where(User.email == email1)).scalar():
            uk = session.query(User).filter_by(email=email1).one()
            if name1 == uk.name:
                # The original's `uk.active` branches were identical, so
                # they are merged here.
                pid = uk.id
                login_user(uk)
                return redirect_back('newpoll')
            else:
                # Email exists but name does not match: show the form again.
                return render_template('signup1.html', next=next)
        else:
            # First visit: create the account, then log in.
            uk = User(email=email1, name=name1)
            session.add(uk)
            session.commit()
            pid = uk.id
            login_user(uk)
            return redirect_back('newpoll')

    return render_template('signup1.html', next=next)
 async def cancelMatch(self, race_id):
     """Cancel an ongoing race and refund every bet placed on it."""
     if (not self.session.query(
             exists().where(Race.id == race_id)).scalar()):
         await self.bot.say("This race doesn't exist")
         return
     race = self.session.query(Race).get(race_id)
     if not race.ongoing:
         await self.bot.say("The race is already closed")
         return
     # The bot's own Better record acts as the house account holding the
     # wagered coins; refunds are paid out of it.
     DaCream = self.session.query(Better).filter(
         Better.id == self.bot.user.id).first()
     totalbet = 0
     for bet in self.session.query(Bet).filter(
             Bet.race_id == race_id):  #Regroup better
         better = bet.better
         totalbet = totalbet + bet.coin_bet
         better.coin = better.coin + bet.coin_bet
         DaCream.coin = DaCream.coin - bet.coin_bet
     bot_channel = discord.utils.get(self.bot.get_all_channels(),
                                     name=BOT_CHANNEL)
     await self.bot.send_message(
         bot_channel,
         ("```Match#{} is canceled, bet money has been refunded```").format(
             race.id))
     race.ongoing = False
     race.betsOn = False
     # (removed the original's unused `board_channel` lookup)
     await displayOpenRaces(self.session, self.bot)
     sumup_channel = discord.utils.get(self.bot.get_all_channels(),
                                       name=SUMUP_CHANNEL)
     await self.bot.send_message(
         sumup_channel,
         "**Sum up of {}**\n```Match#{} is canceled, {} coins have been refunded```"
         .format(race, race.id, totalbet))
     # Drop the scheduled job tied to this race, if any.
     job = self.session.query(Job).filter(Job.race_id == race_id).first()
     if job: self.session.delete(job)
     self.session.commit()
Ejemplo n.º 47
0
    def populateUsersTable(self):
        '''
        Fill the users table from the stored trends.

        A user is inserted only when: the trend page yields exactly 10
        infos, the user is not already in the database, the user has a
        wiki bio, and the user has a twitter username.
        '''
        session = self.session
        for trend in session.query(Trend).all():
            user_ids, wiki_urls, explore_urls = UserUtils().get_trend_infos(
                trend.trend_url)
            if not (len(user_ids) == len(wiki_urls) == len(explore_urls) ==
                    10):
                continue
            for user_id, wiki_url, explore_url in zip(user_ids, wiki_urls,
                                                      explore_urls):
                if session.query(exists().where(User.id == user_id)).scalar():
                    continue
                bio = UserUtils().get_bio(wiki_url)
                if not bio:
                    continue
                twitter_name = UserUtils().get_twitter_name(user_id)
                if not twitter_name:
                    continue
                session.add(User(id=user_id,
                                 twitter_name=twitter_name,
                                 bio=bio,
                                 wiki_url=wiki_url,
                                 explore_url=explore_url))
            session.commit()
Ejemplo n.º 48
0
class Link(Base, TimestampColumns):
    # URL-shortener link record.  ``hashid`` is the public short code; the
    # three hash columns support ban checks at different granularities
    # (full URL, host only, path only).
    hashid = Column(String, index=True, unique=True, nullable=False)
    _url = Column('url', Unicode, index=True, nullable=False)
    user_id = Column(BigInteger)
    _hash = Column('hash', String, index=True, nullable=False)
    _hash_netloc = Column('hash_netloc', String, nullable=False)
    _hash_path = Column('hash_path', String, nullable=False)
    meta = Column(MutableDict.as_mutable(JSONB), default={})
    # Partial unique indexes: a URL is unique per owning user, and globally
    # unique among ownerless links (user_id IS NULL).
    __table_args__ = (
        Index('unique_url_and_user_id', _url, user_id, unique=True,
              postgresql_where=(user_id != None)),
        Index('unique_url_and_user_id_is_null', 'url', unique=True,
              postgresql_where=(user_id == None))
    )

    # True when any of this link's hashes appears in BlockedLink.
    is_banned = column_property(
        exists().where(
            or_(
                BlockedLink.hash.in_([_hash, _hash_netloc, _hash_path])
            )
        )
    )

    @property
    def clicks(self):
        # NOTE(review): ``self._clicks`` is not defined in this class body —
        # presumably a relationship declared elsewhere; verify.
        if self._clicks:
            return self._clicks.counter
        return 0

    @hybrid_property
    def url(self):
        return self._url

    @url.setter
    def url(self, value):
        # Keep the hash columns in sync whenever the URL is (re)assigned.
        self._url = value
        self._hash, self._hash_netloc, self._hash_path = create_url_hashes(value)
Ejemplo n.º 49
0
    def write_dag(cls,
                  dag: DAG,
                  min_update_interval: Optional[int] = None,
                  session: Session = None):
        """Serializes a DAG and writes it into database.
        If the record already exists, it checks if the Serialized DAG changed or not. If it is
        changed, it updates the record, ignores otherwise.

        :param dag: a DAG to be written into database
        :param min_update_interval: minimal interval in seconds to update serialized DAG
        :param session: ORM Session
        """
        # Checks if (Current Time - Time when the DAG was written to DB) < min_update_interval
        # If Yes, does nothing
        # If No or the DAG does not exists, updates / writes Serialized DAG to DB
        if min_update_interval is not None:
            # A row updated within the last min_update_interval seconds is
            # considered fresh enough; skip the write entirely.
            if session.query(exists().where(
                    and_(cls.dag_id == dag.dag_id,
                         (timezone.utcnow() -
                          timedelta(seconds=min_update_interval)) <
                         cls.last_updated))).scalar():
                return

        log.debug("Checking if DAG (%s) changed", dag.dag_id)
        new_serialized_dag = cls(dag)
        # Compare content hashes so an unchanged DAG is not rewritten.
        serialized_dag_hash_from_db = session.query(
            cls.dag_hash).filter(cls.dag_id == dag.dag_id).scalar()

        if serialized_dag_hash_from_db == new_serialized_dag.dag_hash:
            log.debug(
                "Serialized DAG (%s) is unchanged. Skipping writing to DB",
                dag.dag_id)
            return

        log.debug("Writing Serialized DAG: %s to the DB", dag.dag_id)
        # merge() performs an upsert keyed on the primary key.
        session.merge(new_serialized_dag)
        log.debug("DAG: %s written to the DB", dag.dag_id)
Ejemplo n.º 50
0
def get_acquisition_framework_stats(info_role, id_acquisition_framework):
    """
    Get stats from one AF
    .. :quickref: Metadata;
    :param id_acquisition_framework: the id_acquisition_framework
    :param type: int
    """
    datasets = TDatasets.query.filter(
        TDatasets.id_acquisition_framework == id_acquisition_framework).all()
    dataset_ids = [d.id_dataset for d in datasets]

    nb_dataset = len(dataset_ids)
    # Distinct taxa vs. raw observation rows over the AF's datasets.
    nb_taxons = (DB.session.query(Synthese.cd_nom).filter(
        Synthese.id_dataset.in_(dataset_ids)).distinct().count())
    nb_observations = (DB.session.query(Synthese.cd_nom).filter(
        Synthese.id_dataset.in_(dataset_ids)).count())
    nb_habitat = 0

    # Check if pr_occhab exist
    # (the optional habitat-occurrence schema may not be installed)
    check_schema_query = exists(
        select([text("schema_name")
                ]).select_from(text("information_schema.schemata")).where(
                    text("schema_name = 'pr_occhab'")))

    if DB.session.query(check_schema_query).scalar() and nb_dataset > 0:
        # NOTE(review): this SQL is assembled by string interpolation.  The
        # ids come from the database as integers, so injection risk is low,
        # but bound parameters would be safer — confirm before changing.
        query = (
            "SELECT count(*) FROM pr_occhab.t_stations s, pr_occhab.t_habitats h WHERE s.id_station = h.id_station AND s.id_dataset in \
        (" + str(dataset_ids).strip("[]") + ")")

        nb_habitat = DB.engine.execute(text(query)).first()[0]

    return {
        "nb_dataset": nb_dataset,
        "nb_taxons": nb_taxons,
        "nb_observations": nb_observations,
        "nb_habitats": nb_habitat,
    }
Ejemplo n.º 51
0
def update_comments():
    """Update comments for each of today's games, skipping ones already stored."""
    new_comments = json.loads(utils.fetch_comments())
    today = datetime.today().strftime('%Y%m%d')
    games = db.session.query(Game).filter_by(game_date=today).all()
    for fresh in new_comments:
        for game in games:
            # Match the comment to its game by reddit thread id.
            if fresh["thread_id"] != game.thread_id:
                continue
            already = db.session.query(
                exists().where(
                    Comment.comment_id == fresh["comment_id"])).scalar()
            if already:
                continue
            game.comments.append(Comment(
                author=fresh["author"],
                body=fresh["body"],
                author_flair_css_class=fresh["author_flair_css_class"],
                comment_id=fresh["comment_id"],
                score=fresh["score"],
                created_utc=fresh["created_utc"],
                emitted="false"))
    db.session.commit()
    return True
        def fill_table(str_table, table_type, field_names):
            """Insert rows parsed from a '|'-separated text table into
            *table_type*, skipping rows that already exist with the same
            field values."""
            # iterate over the rows
            for row in str_table.split('\n'):
                # split the row into '|'-separated fields
                fields = list(map(str.strip, row.split('|')))

                # build the comparator:
                # getattr(table_type, field_name) produces constructs like
                # table_type.field_name (e.g. Author.country);
                # compare each column with its corresponding field value,
                # for every (field_name, field) pair,
                # and AND all of those comparisons together
                comparator = and_(*[getattr(table_type, field_name) == field
                                    for field_name, field
                                    in zip(field_names, fields)])

                # if a record with exactly this set of values already
                # exists in the table, move on to the next row
                if (self.session.query(exists()
                                       .where(comparator))
                                .scalar()):
                    continue

                # create a table element
                instance = table_type()
                # fill it with data:
                # setattr(instance, field_name, field) expands to
                # instance.field_name = field
                # (e.g. author.country = 'Russia')
                for field_name, field in zip(field_names, fields):
                    setattr(instance, field_name, field)
                # add the element to the database
                self.session.add(instance)
                self.session.commit()
Ejemplo n.º 53
0
    def get_subnet_segment_ids(cls, context, network_id,
                               ignored_service_type=None,
                               subnet_id=None):
        """Return the segment ids of the subnets on *network_id*.

        Subnets carrying *ignored_service_type* are excluded, as is the
        subnet identified by *subnet_id* (a subnet currently being created
        that has no service-type rows persisted yet).
        """
        query = (context.session.query(cls.db_model.segment_id)
                 .filter(cls.db_model.network_id == network_id))

        # NOTE(zigo): Subnets holding the ignored service type are removed
        # from the segment list — they can be part of a segmented network
        # without having a segment ID of their own.
        if ignored_service_type:
            svc_model = SubnetServiceType.db_model
            query = query.filter(~exists().where(and_(
                cls.db_model.id == svc_model.subnet_id,
                svc_model.service_type == ignored_service_type)))

        # (zigo): At subnet-creation time the subnet's service_types are not
        # yet populated, so the ~exists() above cannot filter it; exclude
        # the subnet being created explicitly.
        if subnet_id:
            query = query.filter(cls.db_model.id != subnet_id)

        return [row[0] for row in query.all()]
Ejemplo n.º 54
0
def copy_Ads_Codes():
    """Copy every row of the legacy ``Ads_Codes`` table into ``Ads_Codes_tmp``,
    adding the new ``ad_id`` field (``<ad_number>_<country abbreviation>``).

    Idempotent: rows whose ad_number already exists in ``Ads_Codes_tmp`` are
    skipped, so the migration can be re-run safely. The object "tmp" does not
    exist anymore in the db.py ressource.
    """
    for row in session.query(Ads_Codes):
        # Skip ads that were already migrated on a previous run.
        if session.query(exists().where(
                Ads_Codes_tmp.ad_number == row.ad_number)).scalar():
            continue

        # The country abbreviation is derived from the URL recorded in the
        # ad's scraped documentation file.
        with open(
                f"./results/getCodes/documentation/{row.ad_number}__documentation.json",
                "r") as f:
            doc = json.loads(f.read())
        url = doc["info"]["selenium"][0]["request"]["url"]
        abr_country = get_abr_country(url)

        entry = Ads_Codes_tmp(ad_id=f"{row.ad_number}_{abr_country}",
                              ad_number=row.ad_number,
                              client_code=row.client_code,
                              date_created=row.date_created,
                              date_updated=row.date_updated,
                              status=row.status,
                              status_image_taken=row.status_image_taken,
                              status_vendeur_taken=row.status_vendeur_taken)
        entry.insertCode(session)
Ejemplo n.º 55
0
def start_only(start):
    """Return min/avg/max temperature observations from *start* onward.

    :param start: date string that must match a Measurement.date value
        exactly; otherwise a 404 with the valid date range is returned.
    :return: JSON list of result strings, or a (JSON error, 404) tuple.
    """
    # Create session (link) from Python to the DB.
    session = Session(engine)
    # Ensure the session is always released, whichever branch returns.
    try:
        # Date Range (only for help to user in case date gets entered wrong)
        date_range_max = session.query(Measurement.date).order_by(
            Measurement.date.desc()).first()
        # first() returns a one-element row; strip the tuple punctuation
        # from its repr to get a bare date string.
        date_range_max_str = re.sub("'|,", "", str(date_range_max))
        print(date_range_max_str)

        date_range_min = session.query(Measurement.date).first()
        date_range_min_str = re.sub("'|,", "", str(date_range_min))
        print(date_range_min_str)

        # Check for valid entry of start date.
        valid_entry = session.query(
            exists().where(Measurement.date == start)).scalar()

        if valid_entry:
            results = (session.query(func.min(Measurement.tobs),
                                     func.avg(Measurement.tobs),
                                     func.max(Measurement.tobs))
                       .filter(Measurement.date >= start).all())

            tmin = results[0][0]
            tavg = '{0:.4}'.format(results[0][1])
            tmax = results[0][2]

            result_printout = (['Entered Start Date: ' + start,
                                'The lowest Temperature was: ' + str(tmin) + ' F',
                                'The average Temperature was: ' + str(tavg) + ' F',
                                'The highest Temperature was: ' + str(tmax) + ' F'])
            return jsonify(result_printout)

        return jsonify(
            {"error": f"Input Date {start} not valid. Date Range is {date_range_min_str} to {date_range_max_str}"}), 404
    finally:
        # Bug fix: the session was previously never closed, leaking a DB
        # connection per request.
        session.close()
Ejemplo n.º 56
0
def add_interfaces(session):
    """ Add a default interface for all HW that has an IP """
    # Select only hardware that has no interface rows yet, so the script
    # can be re-run without duplicating interfaces.
    q = session.query(HardwareEntity)
    q = q.filter(~exists().where(
        Interface.hardware_entity_id == HardwareEntity.id))
    # Eager-load the primary-name chain so the loop below does not issue
    # one query per hardware entity.
    q = q.outerjoin(PrimaryNameAssociation, System, DnsDomain)
    q = q.options(contains_eager('_primary_name_asc'))
    q = q.options(contains_eager('_primary_name_asc.dns_record'))
    q = q.options(contains_eager('_primary_name_asc.dns_record.dns_domain'))
    # Only HW with an IP qualifies for a default interface.
    q = q.filter(System.ip != None)

    hws = q.all()
    count = 0
    for hw in hws:
        # Pick the conventional default interface name/type per HW kind.
        if hw.hardware_type == "machine":
            interface = "eth0"
            itype = "public"
        elif hw.hardware_type == "switch":
            interface = "xge"
            itype = "oa"
        else:
            # Anything else (e.g. chassis) gets an "oa" management interface.
            interface = "oa"
            itype = "oa"

        #print "Adding default interface for {0:l}".format(hw)

        dbinterface = Interface(
            hardware_entity=hw,
            name=interface,
            interface_type=itype,
            comments="Created automatically by upgrade script")
        session.add(dbinterface)
        count += 1

    # Flush (not commit): the caller decides when to commit the upgrade.
    session.flush()
    print "Added %d interfaces" % count
Ejemplo n.º 57
0
def add_student():
    """Add student to db"""

    # User reached route via GET (as by clicking a link or via redirect)
    if request.method != "POST":
        return render_template("/index.html")

    # User reached route via POST (as by submitting a form via POST)
    name = request.form.get("student_name")

    # Ensure student_name was submitted
    if not name:
        return apology("Please provide student's name", 401)

    # Check if student is already in database
    already_present = db.session.query(
        exists().where(Students.student == name)).scalar()
    if already_present:
        return apology("The student's name is already in database.", 402)

    # New student: persist and go back to the index page
    db.session.add(Students(student=name))
    db.session.commit()
    return redirect('/')
Ejemplo n.º 58
0
def is_duplicate_schema(forms, errors, db_session):
    """
    Test for duplicate schema in the db

    :forms: A list of forms represented by dictionaries
    :errors: A list of dictionaries of error datastore
    :db_session: required for db operations

    :return: updated error list
    """
    for form in get_unique_forms(forms):
        # Each unique form is indexed as (name, title, publish_date, ...).
        name = form[0]
        title = form[1]
        publish_date = form[2]

        duplicate = db_session.query(
            exists()
            .where(datastore.Schema.name == name)
            .where(datastore.Schema.title == title)
            .where(datastore.Schema.publish_date == publish_date)
        ).scalar()

        if not duplicate:
            continue

        errors.append(
            {
                'schema_name': name,
                'schema_title': title,
                'name': u'N/A',
                'title': u'N/A',
                'errors': 'Duplicate schema -  already exists in the db'
            }
        )

    return errors
Ejemplo n.º 59
0
    def test_upsert_from_select(self):
        """Compile an upsert built as a CTE: UPDATE ... RETURNING feeding an
        INSERT ... SELECT guarded by NOT EXISTS on the CTE."""
        orders_t = table(
            'orders',
            column('region'),
            column('amount'),
            column('product'),
            column('quantity'),
        )

        # The UPDATE half, exposed as a CTE named "upsert".
        upsert_cte = (
            orders_t.update()
            .where(orders_t.c.region == 'Region1')
            .values(amount=1.0, product='Product1', quantity=1)
            .returning(*(orders_t.c._all_columns))
            .cte('upsert'))

        # The INSERT half only fires when the UPDATE matched nothing.
        ins = orders_t.insert().from_select(
            orders_t.c.keys(),
            select([
                literal('Region1'), literal(1.0),
                literal('Product1'), literal(1),
            ]).where(~exists(upsert_cte.select())),
        )

        expected_sql = (
            "WITH upsert AS (UPDATE orders SET amount=:amount, "
            "product=:product, quantity=:quantity "
            "WHERE orders.region = :region_1 "
            "RETURNING orders.region, orders.amount, "
            "orders.product, orders.quantity) "
            "INSERT INTO orders (region, amount, product, quantity) "
            "SELECT :param_1 AS anon_1, :param_2 AS anon_2, "
            ":param_3 AS anon_3, :param_4 AS anon_4 WHERE NOT (EXISTS "
            "(SELECT upsert.region, upsert.amount, upsert.product, "
            "upsert.quantity FROM upsert))"
        )
        self.assert_compile(ins, expected_sql)
Ejemplo n.º 60
0
def save_filehistory(session, record):
    """Persist an activity *record* as a FileHistory row.

    Reuses the file_uuid of the previous history item for the same file so
    that a file's history chain stays linked; a 'recover' to a different
    path deliberately starts a fresh chain.

    :param session: SQLAlchemy session used for the insert.
    :param record: dict-like activity record; mutated in place to carry
        its 'file_uuid'.
    """
    if not should_record(record):
        # Lazy %-style args: the record is only formatted if the message
        # is actually emitted.
        logging.error('invalid activity record: %s', record)
        return

    # use same file_uuid if prev item already exists, otherwise new one
    prev_item = query_next_record(session, record)
    if prev_item:
        if record['path'] != prev_item.path and record['op_type'] == 'recover':
            # Recovered to a different path: treat as a new file chain.
            pass
        else:
            record['file_uuid'] = prev_item.file_uuid

    # Bug fix: dict.has_key() was removed in Python 3; the `in` operator
    # is equivalent and works on both Python 2 and 3.
    if 'file_uuid' not in record:
        file_uuid = uuid.uuid4()
        # avoid hash conflict
        while session.query(
                exists().where(FileHistory.file_uuid == file_uuid)).scalar():
            file_uuid = uuid.uuid4()
        record['file_uuid'] = file_uuid

    filehistory = FileHistory(record)
    session.add(filehistory)
    session.commit()