def __init__(self, course_hedu_id, bra_id):
    """Build the higher-education queries for a course and/or location.

    :param course_hedu_id: course identifier, or None for "all courses".
    :param bra_id: location identifier; None for "all", '' for nationwide.
    """
    # Caches for lazily-computed, sorted views of the query results.
    self._hedu = None
    self._hedu_sorted_by_enrolled = None
    self._hedu_sorted_by_entrants = None
    self._hedu_sorted_by_graduates = None
    self._hedu_major_rank = None
    self.course_hedu_id = course_hedu_id
    self.bra_id = bra_id
    if course_hedu_id is None and bra_id is None:
        # No filters at all: latest year across the whole Yc_hedu table.
        self.max_year_query = db.session.query(func.max(Yc_hedu.year))
        self.hedu_query = Ybc_hedu.query.filter(Ybc_hedu.year == self.max_year_query)
    else:
        # Latest year available for this specific course.
        self.max_year_query = db.session.query(
            func.max(Yc_hedu.year)).filter_by(course_hedu_id=course_hedu_id)
        if bra_id != '':
            # Course within a specific location (year-bra-course table).
            self.hedu_query = Ybc_hedu.query.filter(
                Ybc_hedu.course_hedu_id == self.course_hedu_id,
                Ybc_hedu.bra_id == self.bra_id,
                Ybc_hedu.year == self.max_year_query)
        else:
            # Empty bra_id means country-wide (year-course table).
            self.hedu_query = Yc_hedu.query.filter(
                Yc_hedu.course_hedu_id == self.course_hedu_id,
                Yc_hedu.year == self.max_year_query)
def main():
    """Continuously join deal rows with their tweets and merchant matches,
    and insert the denormalized result into the output SQL table.

    Runs forever, each pass processing only deals newer than the last
    recorded output row.
    """
    # First create the SQL db that we will dump to
    engine, table = init_db()
    connection = engine.connect()
    # Load up all this stuff - crappy code but it works (clean up if time
    # but this whole script is a shoddy hack)
    clear_mappers()
    session = loadTables()
    session2 = loadOutput()
    # create a connection to the mongo DB
    client = MongoClient()
    db = client.dealtrader
    collection = db.raw_tweets
    while True:
        # get number of deals in the table
        cnttot = session.query(func.max(Deals.deal_id))
        num_deals = cnttot[0][0]
        # highest deal id already written to the output table (0 if empty)
        cntdone = session2.query(func.max(Output.deal_id))
        min_deal = cntdone[0][0] or 0
        res = session.query(Deals).all()
        for i in range(min_deal, num_deals):
            tweetid = int(res[i].tweet_id)
            q = session.query(Matches)
            mchres = q.filter(Matches.tweet_id == tweetid).all()
            tweet = collection.find_one({'id': tweetid})
            # BUG FIX: a bare `except:` also swallowed KeyboardInterrupt /
            # SystemExit, making this infinite loop effectively unkillable.
            # Narrowed to Exception; best-effort skip of bad rows is kept.
            try:
                deal_id = res[i].deal_id
                origtext = tweet['text']
                tweetts = str(tweet['created_at'])
                itemdescr = res[i].description
                itemprice = res[i].price
                itemurl = res[i].url
                # pick the match with the lowest merchant price
                lowest_price = min(list(map(lambda x: x.merchant_price, mchres)))
                best_listings = list(filter(lambda x: x.merchant_price == lowest_price, mchres))
                best_listing = best_listings[0]
                bestprice = str(best_listing.merchant_price)
                bestlink = str(best_listing.url)
                ins = insert(table).values(
                    deal_id=deal_id,
                    tweet_id=tweetid,
                    orig_text=origtext,
                    tweet_ts=tweetts,
                    description=itemdescr,
                    price=itemprice,
                    url=itemurl,
                    best_price=bestprice,
                    best_url=bestlink)
                result = connection.execute(ins)
            except Exception:
                # deal with missing tweet / empty matches by skipping the row
                pass
def current_predictions(session, station_id):
    """Return (trip_id, seconds_away_from_stop, latest stamp) rows for one
    station, grouped per trip/offset and ordered newest-stamp first.
    """
    latest_stamp = func.max(db.PredictionRecord.stamp)
    rows = (
        session.query(
            db.PredictionRecord.trip_id,
            db.PredictionRecord.seconds_away_from_stop,
            latest_stamp,
        )
        .filter(db.PredictionRecord.station_id == station_id)
        .group_by(
            db.PredictionRecord.trip_id,
            db.PredictionRecord.seconds_away_from_stop,
        )
        .order_by(latest_stamp.desc())
        .all()
    )
    return rows
def __init__(self, product_id, bra_id):
    """Build the trade (secex) query for a product within a location.

    :param product_id: HS product identifier.
    :param bra_id: location identifier.
    """
    self._secex = None  # lazy cache for the query result
    self.bra_id = bra_id
    self.product_id = product_id
    # Newest year present anywhere in the table.
    self.max_database_year = db.session.query(func.max(Ymbp.year))
    # Newest year for this product/location that is strictly before the
    # database's newest year — presumably to skip a partial current year;
    # TODO(review): confirm that intent.
    self.max_year_query = db.session.query(
        func.max(Ymbp.year)).filter_by(bra_id=bra_id, hs_id=product_id).filter(
        Ymbp.year < self.max_database_year)
    # month == 0 is the full-year aggregate row.
    self.secex_query = Ymbp.query.filter(
        Ymbp.hs_id == self.product_id,
        Ymbp.bra_id == self.bra_id,
        Ymbp.month == 0,
        Ymbp.year == self.max_year_query
    )
def save(self): dbconfig = DBConfig() db = dbconfig.get_db() table = db.get_hosts() session = db.get_session() qry = session.query(func.max(table.c.id).label("max_id")) res = qry.one() oid = res.max_id print "oid: ", oid if oid > -1: oid = oid + 1 else: oid = 1 i = table.insert() i.execute(id=oid, host_template_id=self.host.host_template_id, node_uri=self.host.node_uri, node_cluster_uri=self.host.node_cluster_uri) return oid
def _update_categories(db_session, message, synced_categories):
    """Overwrite message.categories with the remotely-synced set, unless a
    recent local syncback action should still take precedence.
    """
    now = datetime.utcnow()
    # We make the simplifying assumption that only the latest syncback action
    # matters, since it reflects the current local state.
    actionlog_id = (
        db_session.query(func.max(ActionLog.id))
        .filter(
            ActionLog.namespace_id == message.namespace_id,
            ActionLog.table_name == "message",
            ActionLog.record_id == message.id,
            ActionLog.action.in_(["change_labels", "move"]),
        )
        .scalar()
    )
    # BUG FIX: scalar() returns None when the message has no syncback
    # actions at all; .get(None) then returned None and the attribute
    # access below raised AttributeError. With no local action there is
    # nothing to protect, so the synced state can simply be applied.
    if actionlog_id is None:
        message.categories = synced_categories
        message.categories_changes = False
        return
    actionlog = db_session.query(ActionLog).get(actionlog_id)
    # We completed the syncback action /long enough ago/ (on average and
    # with an error margin) that:
    # - if it completed successfully, sync has picked it up; so, safe to
    #   overwrite message.categories
    # - if syncback failed, the local changes made can be overwritten
    #   without confusing the API user.
    # TODO[k]/(emfree): Implement proper rollback of local state in this case.
    # This is needed in order to pick up future changes to the message,
    # the local_changes counter is reset as well.
    # NOTE(review): `.seconds` (not total_seconds()) wraps every 24h; the
    # companion implementation elsewhere does the same — confirm intent.
    if actionlog.status in ("successful", "failed") and (now - actionlog.updated_at).seconds >= 90:
        message.categories = synced_categories
        message.categories_changes = False
def showPost(tag_id, post_id):
    """Render a single board post, bumping its hit counter.

    404s when the tag or post does not exist, or the post is not in the tag.
    """
    recruitcycle = db.session.query(func.max(models.Member.cycle).label("cycle")).first().cycle
    # Managers: members of the two most recent cycles in dept 5 or 6.
    manager = models.Member.query.filter(
        or_(models.Member.cycle == recruitcycle,
            models.Member.cycle == recruitcycle - 1)).filter(
        or_(models.Member.stem_dept_id == 5,
            models.Member.stem_dept_id == 6)).all()
    try:
        tag = models.Tag.query.get(tag_id)
        post = models.Post.query.get(post_id)
        # BUG FIX: existence must be verified before dereferencing
        # tag.posts — previously a missing tag raised AttributeError
        # (an uncaught 500) instead of a clean 404.
        if not (tag and post):
            abort(404)
        if post not in tag.posts:
            abort(404)
        post.hitCount = post.hitCount + 1
        db.session.commit()
        return render_template(
            'post_view.html',
            member=current_user.member,
            nav_id=6,
            tag=tag,
            post=post,
            notifications=notification.Generate(current_user.member),
            boards=models.Tag.query.filter_by(special=1).all(),
            manager=manager)
    except TemplateNotFound:
        abort(404)
def main():
    """Render the dashboard with the three newest suggestions and the
    current manager list."""
    member = current_user.member
    mem = models.Member
    # Newest three children of the root task (the "suggestions" container).
    issues = models.Task.query.get(0).children[::-1][0:3]
    recruitcycle = db.session.query(
        func.max(models.Member.cycle).label("cycle")).first().cycle
    # Managers: the two most recent cycles, in dept 5 or 6.
    recent_cycles = or_(models.Member.cycle == recruitcycle,
                        models.Member.cycle == recruitcycle - 1)
    manager_depts = or_(models.Member.stem_dept_id == 5,
                        models.Member.stem_dept_id == 6)
    manager = models.Member.query.filter(recent_cycles).filter(manager_depts).all()
    try:
        return render_template(
            'dashboard.html',
            member=current_user.member,
            nav_id=1,
            notifications=notification.Generate(current_user.member),
            boards=models.Tag.query.filter_by(special=1).all(),
            manager=manager,
            recruitcycle=recruitcycle,
            issues=issues,
            mem=mem)
    except TemplateNotFound:
        abort(404)
def showTask(id):
    """Render a task page; the template depends on the task's level
    (0 = milestone, 1 = issue, 2 = subtask).
    """
    recruitcycle = db.session.query(func.max(models.Member.cycle).label("cycle")).first().cycle
    # Managers: members of the two most recent cycles in dept 5 or 6.
    manager = models.Member.query.filter(
        or_(models.Member.cycle == recruitcycle,
            models.Member.cycle == recruitcycle - 1)).filter(
        or_(models.Member.stem_dept_id == 5,
            models.Member.stem_dept_id == 6)).all()
    try:
        task = models.Task.query.get(id)
        if not task:
            abort(404)
        if task.level == 0:
            return render_template(
                'milestone.html',
                member=current_user.member,
                milestone=task,
                task=task,
                nav_id=5,
                notifications=notification.Generate(current_user.member),
                boards=models.Tag.query.filter_by(special=1).all(),
                manager=manager)
        if task.level == 1:
            return render_template(
                'issue.html',
                member=current_user.member,
                issue=task,
                task=task,
                nav_id=6,
                notifications=notification.Generate(current_user.member),
                boards=models.Tag.query.filter_by(special=1).all(),
                manager=manager)
        if task.level == 2:
            return render_template(
                'subtask.html',
                member=current_user.member,
                task=task,
                nav_id=6,
                notifications=notification.Generate(current_user.member),
                boards=models.Tag.query.filter_by(special=1).all(),
                manager=manager)
        # NOTE(review): a task with level > 2 falls through and returns
        # None (a 500 in Flask) — confirm levels are constrained to 0-2.
    except TemplateNotFound:
        abort(404)
def showSuggestion(page):
    """Render one 10-item page of suggestions, newest first.

    :param page: 1-based page number; 404s past the last page.
    """
    recruitcycle = db.session.query(func.max(models.Member.cycle).label("cycle")).first().cycle
    # Managers: members of the two most recent cycles in dept 5 or 6.
    manager = models.Member.query.filter(
        or_(models.Member.cycle == recruitcycle,
            models.Member.cycle == recruitcycle - 1)).filter(
        or_(models.Member.stem_dept_id == 5,
            models.Member.stem_dept_id == 6)).all()
    # Suggestions live as children of the root task (id 0); the -1
    # presumably excludes a placeholder child — TODO confirm.
    totalSuggestion = len(models.Task.query.get(0).children[::-1]) - 1
    # Slice bounds for this page (10 per page), counted from the end.
    end = totalSuggestion - 10 * (page - 1) + 1
    start = totalSuggestion - 10 * page + 1
    # Ceiling of totalSuggestion / 10 via the round(x + 0.5) trick.
    maxpage = round(totalSuggestion / 10 + 0.5)
    if start < 1:
        start = 1
    if end < 1:
        abort(404)
    issues = models.Task.query.get(0).children[start:end:][::-1]
    try:
        mem = models.Member
        return render_template(
            'suggestion.html',
            member=current_user.member,
            nav_id=5,
            notifications=notification.Generate(current_user.member),
            boards=models.Tag.query.filter_by(special=1).all(),
            mem=mem,
            manager=manager,
            issues=issues,
            maxpage=maxpage,
            page=page,
            totalSuggestion=totalSuggestion)
    except TemplateNotFound:
        abort(404)
def _update_categories(db_session, message, synced_categories): now = datetime.utcnow() # We make the simplifying assumption that only the latest syncback action # matters, since it reflects the current local state. actionlog_id = db_session.query(func.max(ActionLog.id)).filter( ActionLog.namespace_id == message.namespace_id, ActionLog.table_name == 'message', ActionLog.record_id == message.id, ActionLog.action.in_(['change_labels', 'move'])).scalar() if actionlog_id is not None: actionlog = db_session.query(ActionLog).get(actionlog_id) # Do /not/ overwrite message.categories in case of a recent local # change - namely, a still 'pending' action or one that completed # recently. if (actionlog.status == 'pending' or (now - actionlog.updated_at).seconds <= 90): return # We completed the syncback action /long enough ago/ (on average and # with an error margin) that: # - if it completed successfully, sync has picked it up; so, safe to # overwrite message.categories # - if syncback failed, the local changes made can be overwritten # without confusing the API user. # TODO[k]/(emfree): Implement proper rollback of local state in this case. # This is needed in order to pick up future changes to the message, # the local_changes counter is reset as well. message.categories = synced_categories message.categories_changes = False
def get_session_id():
    """Return the next measurement-session id (1 when the table is empty)."""
    max_id = db.session.query(
        func.max(MeasurementData.session).label("max_id")).one().max_id
    return 1 if max_id is None else max_id + 1
def index():
    """Shortener landing page: on a valid POST, mint the next short URL
    for the submitted link; otherwise just render the form."""
    form = LinkForm()
    if not form.validate_on_submit():
        return render_template('index.html', title="Main", form=form)
    user_db = User.query.get(g.user.get_id())
    if user_db is None:
        raise Exception('User not found in database')
    flash("Url: " + form.url.data)
    # Next id = max(existing) + 1, or 1 for an empty table.
    max_row = db.session.query(func.max(ShortUrl.id).label('max_id')).one()
    next_id = int(max_row.max_id) + 1 if max_row.max_id else 1
    url = BASE_URL + 's/' + dehydrate(next_id)
    short_url = ShortUrl(url=url, creator=user_db,
                         delete_after_first_usage=form.delete_after_usage.data)
    title = get_title(form.url.data)
    full_url = FullUrl(url=form.url.data, short_url=short_url, title=title)
    db.session.add(short_url)
    db.session.add(full_url)
    db.session.commit()
    flash('Short url: {0}'.format(short_url.url))
    return redirect('/')
def __init__(self, university_id):
    """Build the majors query for one university.

    :param university_id: identifier forwarded to the University base.
    """
    University.__init__(self, university_id)
    # Latest year present in the year-university-course table.
    self.max_year_query = db.session.query(func.max(Yuc.year))
    # Only full-length (6-char) course ids, i.e. actual majors rather
    # than broader course groupings.
    self.hedu_query = Yuc.query.filter(
        Yuc.university_id == self.university_id,
        Yuc.year == self.max_year_query,
        func.length(Yuc.course_hedu_id) == 6)
def get(self, min_rqst, max_rqst):
    """Return [total_deal_count, deals] for the requested window, newest
    first, where min_rqst/max_rqst are offsets from the newest deal.
    """
    session = self.loadSession()
    cntres = session.query(func.max(OutputTable.output_id))
    num_deals = cntres[0][0]
    # Clamp the requested window to sane, ordered bounds.
    min_rqst = max(min_rqst, 0)
    max_rqst = max(min_rqst, max_rqst)
    min_slct = num_deals - max_rqst
    max_slct = num_deals - min_rqst
    res = session.query(OutputTable).all()
    deal_list = []
    # Walk backwards from the newest selected row to the oldest.
    for i in range(max_slct * (-1), min_slct * (-1)):
        j = i * (-1) - 1
        # BUG FIX: a bare `except:` also swallowed KeyboardInterrupt and
        # SystemExit; narrowed to Exception while keeping the best-effort
        # skip of out-of-range / malformed rows.
        try:
            deal_list.append({
                'tweet_ts': res[j].tweet_ts,
                'tweet_text': res[j].orig_text,
                'desc': res[j].description,
                'price': res[j].price,
                'url': res[j].url,
                'best_price': res[j].best_price,
                'best_link': res[j].best_url
            })
        except Exception:
            pass
    return [num_deals, deal_list]
def __init__(self, bra_id):
    """Build the trade (secex) query for one location.

    :param bra_id: location identifier.
    """
    # Lazy caches for sorted views of the query results.
    self._secex = None
    self._secex_sorted_by_exports = None
    self._secex_sorted_by_imports = None
    self._secex_sorted_by_distance = None
    self._secex_sorted_by_opp_gain = None
    self.bra_id = bra_id
    # Newest year present anywhere in the table.
    self.max_database_year = db.session.query(func.max(Ymbp.year))
    # Newest year for this location strictly before the database's newest
    # year — presumably to skip a partial current year; TODO confirm.
    self.max_year_query = db.session.query(
        func.max(Ymbp.year)).filter_by(bra_id=self.bra_id).filter(
        Ymbp.year < self.max_database_year)
    # month == 0 is the full-year aggregate; hs_id_len == 6 keeps only
    # full-depth product codes.
    self.secex_query = Ymbp.query.join(Hs).filter(
        Ymbp.bra_id == self.bra_id,
        Ymbp.month == 0,
        Ymbp.hs_id_len == 6,
        Ymbp.year == self.max_year_query)
def may_challenge(player_name, cool_down_time_range_m, cool_down_random_salt):
    """
    Check whether the given challenger is not placing a new challenge too
    quickly after the previous one.
    """
    challenger = db.session.query(Player).filter(Player.name == player_name).one()
    last_challenge_date = db.session.\
        query(func.max(Game.date)).\
        filter(Game.challenger == challenger).\
        first()[0]
    # A player with no previous challenges may always challenge.
    if not last_challenge_date:
        return True
    # The cooldown is random within the configured range, but seeding with
    # salt + game date makes it stable for a given previous challenge.
    random.seed(cool_down_random_salt + str(last_challenge_date))
    cooldown_m = random.randint(*cool_down_time_range_m)
    # Allowed again once more minutes have elapsed than the cooldown.
    elapsed_m = (datetime.utcnow() - last_challenge_date).total_seconds() / 60
    return elapsed_m > cooldown_m
def _get_latest(self, session):
    """Get the time for the latest entry in this Sink."""
    newest = session.query(func.max(self.table.columns.time)).scalar()
    _LOG.debug("Latest entry in %s %s", self.table, newest)
    return newest
def has_exceeded_traffic(user):
    """
    The function calculates the balance of the users traffic.
    :param user: The user object which has to be checked.
    :return: True if the user has more traffic than allowed and false if he
        did not exceed the limit.
    """
    result = session.session.query(
        User.id,
        # BUG FIX: .label() was attached to func.sum(...) — an operand of
        # the comparison — so the selected comparison column was anonymous
        # and `result.has_exceeded_traffic` did not address it. The label
        # must wrap the whole boolean expression.
        # The * 1.10 grants a 10% grace margin over the group limit.
        ((func.max(TrafficGroup.traffic_limit) * 1.10) <
         func.sum(TrafficVolume.size)).label("has_exceeded_traffic")
    ).join(
        User.active_traffic_groups
    ).join(
        User.user_hosts
    ).join(
        Host.ips
    ).join(
        Ip.traffic_volumes
    ).filter(
        User.id == user.id
    ).group_by(
        User.id
    ).first()
    if result is not None:
        return result.has_exceeded_traffic
    else:
        # No rows (no hosts/volumes for the user): treat as not exceeded.
        return False
def post_process(cls, db, **kwargs):
    '''
    delete all 'depature_time' values that appear for the last stop time of a
    given trip (e.g., the trip ends there, so there isn't a further vehicle
    departure for that stop time / trip pair)...
    NOTE: we know this breaks the current GTFS spec, which states that
    departure & arrival times must both exist for every stop time.
    Sadly, GTFS is wrong...
    '''
    log.debug('{0}.post_process'.format(cls.__name__))
    # remove the departure times at the end of a trip:
    # subquery finds each trip's highest stop_sequence, then every
    # StopTime matching that (trip_id, sequence) pair is nulled out.
    log.info("QUERY StopTime")
    sq = db.session.query(StopTime.trip_id,
                          func.max(StopTime.stop_sequence).label('end_sequence'))
    sq = sq.group_by(StopTime.trip_id).subquery()
    q = db.session.query(StopTime)
    q = q.filter_by(trip_id=sq.c.trip_id, stop_sequence=sq.c.end_sequence)
    for r in q:
        r.departure_time = None
    # remove the arrival times at the start of a trip (mirror image:
    # lowest stop_sequence per trip).
    log.info("QUERY StopTime")
    sq = db.session.query(StopTime.trip_id,
                          func.min(StopTime.stop_sequence).label('start_sequence'))
    sq = sq.group_by(StopTime.trip_id).subquery()
    q = db.session.query(StopTime)
    q = q.filter_by(trip_id=sq.c.trip_id, stop_sequence=sq.c.start_sequence)
    for r in q:
        r.arrival_time = None
    db.session.commit()
def stock_checkpullthru(stockid, maxtime):
    """Did this stock item require pulling through?"""
    stale_check = select([func.now() - func.max(StockOut.time) > maxtime]).\
        where(StockOut.stockid == stockid).\
        where(StockOut.removecode_id.in_(['sold', 'pullthru']))
    return s.execute(stale_check).scalar()
def add_transformed(self, csvfile, name=None, param=None):
    """
    Import deformed point coordinates from ANTs to image
    Assumes LPS->RAS conversion.

    :param csvfile: CSV with columns x, y, z, label.
    :param name: optional name stored on the new Transform row.
    :param param: optional parameters stored on the new Transform row.
    """
    self.init_db()
    # Commit first so the new Transform gets its id assigned.
    new_trans = Transform(name=name, params=param)
    self.session.add(new_trans)
    self.session.commit()
    trans_id = new_trans.id
    # Continue point ids after the current maximum (0-based offset).
    id_delta = self.session.query(func.max(Point.id)).first()[0]
    if id_delta is None:
        id_delta = 0
    else:
        id_delta += 1
    print 'insert new points'
    point_queue = []
    mapping_queue = []
    with open(csvfile, 'rb') as fp:
        preader = csv.DictReader(fp)
        for i, r in enumerate(preader):
            pid = i + id_delta
            # x and y negated for the LPS -> RAS conversion noted above.
            point_queue.append({'id': pid,
                                'x': float(r['x']) * -1,
                                'y': float(r['y']) * -1,
                                'z': float(r['z'])})
            mapping_queue.append({'orig_id': r['label'],
                                  'result_id': pid,
                                  'transform_id': trans_id})
    # Bulk-insert via Core for speed (bypasses the ORM unit of work).
    self.engine.execute(Point.__table__.insert(), point_queue)
    self.engine.execute(PointMapping.__table__.insert(), mapping_queue)
def index(university_id):
    """Render the university profile page.

    404s when the university doesn't exist, has no enrollment data, or its
    majors data is not from the newest available year.
    """
    university = UniversityModel.query.filter_by(id=university_id).first_or_404()
    university_service = University(university.id)
    majors_service = UniversityMajors(university.id)
    # Headline figures for the university as a whole.
    header = {
        'year': university_service.year(),
        'type': university_service.university_type(),
        'enrolled': university_service.enrolled(),
        'entrants': university_service.entrants(),
        'graduates': university_service.graduates()
    }
    # Per-major highlights (top major and its figure, per metric).
    body = {
        'major_with_more_enrollments': majors_service.major_with_more_enrollments(),
        'highest_enrollment_number_by_major': majors_service.highest_enrolled_number(),
        'major_with_more_entrants': majors_service.major_with_more_entrants(),
        'highest_entrant_number_by_major': majors_service.highest_entrants_number(),
        'major_with_more_graduates': majors_service.major_with_more_graduates(),
        'highest_graduate_number_by_major': majors_service.highest_graduates_number(),
        'year': majors_service.year(),
    }
    hedu_max_year = db.session.query(func.max(Yu.year)).first()[0]
    # Reject stale or empty data rather than rendering a partial page.
    if header['enrolled'] is None or hedu_max_year != body['year']:
        abort(404)
    else:
        return render_template('university/index.html',
                               university=university,
                               header=header,
                               body=body)
def null_out_last_stop_departures(cls, db): ''' delete all 'depature_time' values that appear for the last stop time of a given trip (e.g., the trip ends there, so there isn't a further vehicle departure / customer pickup for that stop time / trip pair)... -- query below shows null'd out stop times select * from ott.stop_times where COALESCE(arrival_time,'')='' or COALESCE(departure_time,'')='' NOTE: we know this breaks the current GTFS spec, which states that departure & arrival times must both exist for every stop time. Sadly, GTFS is kinda wrong... ''' # step 1: remove the departure times at the end of a trip log.info("QUERY StopTime for all trip end times") sq = db.session.query(StopTime.trip_id, func.max(StopTime.stop_sequence).label('end_sequence')) sq = sq.group_by(StopTime.trip_id).subquery() q = db.session.query(StopTime) q = q.filter_by(trip_id=sq.c.trip_id, stop_sequence=sq.c.end_sequence) for st in q: if st.pickup_type == 1: st.departure_time = None # remove the arrival times at the start of a trip log.info("QUERY StopTime for all trip start times") sq = db.session.query(StopTime.trip_id, func.min(StopTime.stop_sequence).label('start_sequence')) sq = sq.group_by(StopTime.trip_id).subquery() q = db.session.query(StopTime) q = q.filter_by(trip_id=sq.c.trip_id, stop_sequence=sq.c.start_sequence) for st in q: if st.drop_off_type == 1: st.arrival_time = None db.session.flush() db.session.commit() db.session.close()
def __init__(self, university_id):
    """Build the higher-education query for one university (or all).

    :param university_id: university identifier, or None for all
        universities.
    """
    # Lazy caches for sorted views of the query results.
    self._hedu = None
    self._hedu_sorted_by_enrolled = None
    self._hedu_sorted_by_entrants = None
    self._hedu_sorted_by_graduates = None
    self.university_id = university_id
    if university_id is None:
        # Unfiltered: latest year across the whole table.
        self.max_year_query = db.session.query(func.max(Yu.year))
        self.hedu_query = Yu.query.filter(Yu.year == self.max_year_query)
    else:
        # Latest year available for this specific university.
        self.max_year_query = db.session.query(
            func.max(Yu.year)).filter_by(university_id=university_id)
        self.hedu_query = Yu.query.filter(
            Yu.university_id == self.university_id,
            Yu.year == self.max_year_query)
def get_current_structure_states(self):
    """Get current structure states of the scenario.

    :returns: sqlalchemy Query object.
    """
    # For every (scenario, tree_path) pair, find the timestamp of the most
    # recent state change...
    subquery = Query([
        ScenarioStructureState.scenario_id,
        ScenarioStructureState.tree_path,
        func.max(ScenarioStructureState.changed).label('newest_change_date')
    ]).filter_by(
        scenario_id=self.__id
    ).group_by(
        ScenarioStructureState.scenario_id,
        ScenarioStructureState.tree_path
    ).subquery()
    # ...then join back to pick exactly the row carrying that newest
    # timestamp, keeping only enabled states.
    return Query([
        ScenarioStructureState
    ]).join(
        subquery,
        and_(
            ScenarioStructureState.scenario_id == subquery.columns.scenario_id,
            ScenarioStructureState.tree_path == subquery.columns.tree_path,
            ScenarioStructureState.changed == subquery.columns.newest_change_date
        )
    ).filter(
        ScenarioStructureState.enabled == True  # pylint: disable=singleton-comparison
    )
def _attributes(request):
    '''
    This service exposes preview values based on a layer Id and an attribute
    name (mapped in the model)
    '''
    MAX_ATTR_VALUES = 50
    attributesValues = []
    params = _get_attributes_params(request)
    models = models_from_bodid(params.layerId)
    if models is None:
        raise exc.HTTPBadRequest('No Vector Table was found for %s' % params.layerId)
    # Check that the attribute provided is found at least in one model;
    # the first model exposing it wins.
    modelToQuery = None
    for model in models:
        attributes = model().getAttributesKeys()
        if params.attribute in attributes:
            modelToQuery = model
            break
    if modelToQuery is None:
        raise exc.HTTPBadRequest('No attribute %s was found for %s' % (params.attribute, params.layerId))
    col = modelToQuery.get_column_by_property_name(params.attribute)
    colType = str(col.type)
    if colType in ['DATE', 'INTEGER', 'NUMERIC']:
        # Numeric-ish columns: preview is just the [min, max] range.
        query = request.db.query(func.max(col).label('max'), func.min(col).label('min'))
        res = query.one()
        return {'values': [res.min, res.max]}
    else:
        # Other columns: up to MAX_ATTR_VALUES distinct values, sorted.
        # NOTE(review): DISTINCT may yield a NULL row; sorted() would then
        # fail on Python 3 comparing None with str — confirm data/runtime.
        query = request.db.query(col).distinct().order_by(col)
        query = query.limit(MAX_ATTR_VALUES)
        for attr in query:
            if len(attr):
                attributesValues.append(attr[0])
        return {'values': sorted(attributesValues)}
def save(self): dbconfig = DBConfig() db = dbconfig.get_db() table = db.get_containers() session = db.get_session() qry = session.query(func.max(table.c.id).label("max_id")) res = qry.one() oid = res.max_id print "oid: ", oid if oid > -1: oid = oid + 1 else: oid = 1 i = table.insert() i.execute(id=oid, host_id=self.container.host_id, cpu=self.container.cpu, memory=self.container.memory, disk_size=self.container.disk_size, request_id=self.container.request_id, service_uri=self.container.service_uri, container_uri=self.container.container_uri) return oid
def get_number_comments(self, status=None):
    """Count top-level comments on this change, optionally restricted to
    those whose *latest* status equals `status`.

    :param status: None counts every comment; STATUS_OPEN additionally
        counts comments that have no status row at all.
    """
    if not status:
        # Simple case: total comment count for the change.
        return Session.query(sa.func.count(Comment.id)).filter_by(change_id=self.id).first()[0]
    # Subquery: for each comment, the timestamp of its newest status row.
    date = Session.query(func.max(CommentStatus.created_date).label('date'), Comment.id)
    date = date.filter(CommentStatus.comment_id == Comment.id).filter(Comment.change_id == self.id)
    date = date.group_by(CommentStatus.comment_id, Comment.id)
    subq = date.subquery()
    # Outer joins keep comments that have no status rows; only top-level,
    # non-removed comments are counted.
    q = Session.query(func.count(Comment.id)).outerjoin((subq, subq.c.id == Comment.id))
    q = q.outerjoin((CommentStatus, CommentStatus.comment_id == Comment.id))
    q = q.filter(Comment.change_id == self.id).filter(Comment.status != STATUS_REMOVED)
    q = q.filter(Comment.in_reply_to_id == None)
    if status == STATUS_OPEN:
        # Open = no status row at all, or the newest status row is "open".
        q = q.filter(sa.or_(
            CommentStatus.id == None,
            sa.and_(CommentStatus.created_date == subq.columns.date,
                    CommentStatus.status == status)
        ))
        return q.scalar()
    else:
        # Otherwise the newest status row must match exactly.
        q = q.filter(
            sa.and_(CommentStatus.created_date == subq.columns.date,
                    CommentStatus.status == status)
        )
        return q.scalar()
def recent(session, limit=5):
    """Return up to `limit` users, ordered by their most recent rating."""
    # Per-user timestamp of the newest rating.
    latest_rating = session.query(
        Rating.user_id,
        func.max(Rating.rated).label('max_rated'),
    ).group_by(Rating.user_id).subquery()
    users = session.query(User)\
        .join((latest_rating, latest_rating.c.user_id == User.id))\
        .order_by(latest_rating.c.max_rated.desc())\
        .limit(limit).all()
    return users
def getLastCId(self, blogSeo):
    '''
    @see IBlogSeoService.getLastCId

    Populates blogSeo.LastCId with the newest post change id and returns
    the same blogSeo object.
    '''
    sql = self.session().query(
        func.max(BlogPostMapped.CId).label("LastCId"))
    # NOTE(review): no explicit join between BlogPostMapped and
    # BlogSeoMapped here — presumably an implicit relationship/FK makes
    # this correlate; confirm it does not produce a cross join.
    sql = sql.filter(BlogSeoMapped.Id == blogSeo.Id)
    blogSeo.LastCId = sql.one()[0]
    return blogSeo
def get_max_last_updated_datetime(cls, session: Session = None
                                  ) -> Optional[datetime]:
    """
    Get the maximum date when any DAG was last updated in serialized_dag table

    :param session: ORM Session
    """
    newest = session.query(func.max(cls.last_updated))
    return newest.scalar()
def _last_build():
    """Return a relationship resolving to each package's newest build.

    Uses a non-primary mapper over a select restricted to the max build id
    per package.
    """
    # Highest build id per package (build ids are assumed monotonically
    # increasing, so max(id) is the latest build — TODO confirm).
    max_expr = select([func.max(Build.id).label('mx')])\
        .group_by(Build.package_id).alias()
    # Builds narrowed to only those newest-per-package rows.
    joined = select([Build]).select_from(join(Build, max_expr, Build.id == max_expr.c.mx))\
        .alias()
    # Non-primary mapper lets the ORM treat the narrowed select as the
    # relationship target without remapping the Build class.
    return relationship(mapper(Build, joined, non_primary=True), uselist=False,
                        primaryjoin=(Package.id == joined.c.package_id))
def lastseenuid(account_id, session, folder_id):
    """Return the highest IMAP UID recorded for the folder, or 0 if none.

    Uses a baked (cached-compilation) query for speed on this hot path.
    """
    q = bakery(lambda session: session.query(func.max(ImapUid.msg_uid)))
    # bindparam placeholders keep the baked query reusable across calls.
    q += lambda q: q.filter(
        ImapUid.account_id == bindparam("account_id"),
        ImapUid.folder_id == bindparam("folder_id"),
    )
    res = q(session).params(account_id=account_id, folder_id=folder_id).one()[0]
    # max() is None for an empty folder; normalize to 0.
    return res or 0
def _set_seqno(article, pub_id):
    """Set an article's seq# (next free number within the publication,
    or None when the article has no publication)."""
    if not pub_id:
        article.article_seqno = None
        return
    # Highest existing seq# within this publication (None if none yet).
    max_seqno = db.session.query(func.max(Article.article_seqno)) \
        .filter(Article.pub_id == pub_id) \
        .scalar()
    article.article_seqno = (max_seqno + 1) if max_seqno is not None else 1
def __init__(self, product_id, bra_id):
    """Build the product-by-destination trade query, optionally narrowed
    to one location.

    :param product_id: HS product identifier.
    :param bra_id: optional location identifier; falsy means worldwide.
    """
    Product.__init__(self, product_id)
    # Latest year with full-year data for the product (month == 12 rows
    # present implies the year is complete).
    self.max_year_query = db.session.query(func.max(Ympw.year)).filter_by(
        hs_id=product_id, month=12)
    # month == 0 is the full-year aggregate; wld_id_len == 5 keeps only
    # full-depth destination codes.
    self.secex_query = Ympw.query.join(Wld).filter(
        Ympw.hs_id == self.product_id,
        Ympw.wld_id_len == 5,
        Ympw.month == 0,
        Ympw.year == self.max_year_query)
    if bra_id:
        # Location given: redo both queries against the bra-level table.
        self.bra_id = bra_id
        self.max_year_query = db.session.query(func.max(
            Ymbpw.year)).filter_by(hs_id=product_id, bra_id=bra_id, month=12)
        self.secex_query = Ymbpw.query.join(Wld).filter(
            Ymbpw.hs_id == self.product_id,
            Ymbpw.year == self.max_year_query,
            Ymbpw.wld_id_len == 5,
            Ymbpw.bra_id == self.bra_id,
            Ymbpw.month == 0)
async def get_last_synced_svn_revision(self):
    """Return the highest revision id stored so far, or None when the
    revisions table is empty."""
    async with self.engine.acquire() as conn:
        query = sqlalchemy.select([func.max(revisions_tbl.c.id)])
        _logger.debug("Last revision SQL statement: {}".format(query))
        last_rev = await conn.scalar(query)
        _logger.debug("Got commit id: {!r}".format(last_rev))
        return last_rev
def __init__(self, bra_id):
    """Build the per-location higher-education majors query.

    :param bra_id: location identifier.
    """
    LocationUniversity.__init__(self, bra_id)
    self._hedu = None  # lazy cache for the query result
    self.bra_id = bra_id
    # Latest year with data for this location.
    self.max_year_query = db.session.query(func.max(
        Ybc_hedu.year)).filter_by(bra_id=bra_id)
    # course_hedu_id_len == 6 keeps only full-depth course codes (majors).
    self.hedu_query = Ybc_hedu.query.join(Course_hedu).filter(
        Ybc_hedu.bra_id == self.bra_id,
        Ybc_hedu.course_hedu_id_len == 6,
        Ybc_hedu.year == self.max_year_query)
def __init__(self, product_id):
    """Build the trade (secex) query for one product (or all products).

    :param product_id: HS product identifier, or None for all.
    """
    # Lazy caches for sorted views of the query results.
    self._secex = None
    self._secex_sorted_by_balance = None
    self._secex_sorted_by_exports = None
    self._secex_sorted_by_imports = None
    self.product_id = product_id
    if product_id is None:
        # Latest complete year overall (month == 12 rows present).
        self.max_year_query = db.session.query(func.max(
            Ymp.year)).filter_by(month=12)
        # month == 0 is the full-year aggregate row.
        self.secex_query = Ymp.query.join(Hs).filter(
            Ymp.month == 0, Ymp.year == self.max_year_query)
    else:
        # Latest complete year for this specific product.
        self.max_year_query = db.session.query(func.max(
            Ymp.year)).filter_by(hs_id=product_id, month=12)
        self.secex_query = Ymp.query.join(Hs).filter(
            Ymp.hs_id == self.product_id,
            Ymp.month == 0,
            Ymp.year == self.max_year_query)
def __init__(self, bra_id):
    """Build the trade (secex) query for one location.

    :param bra_id: location identifier.
    """
    # Lazy caches for the query result and its ECI-sorted view.
    self._secex = None
    self._secex_sorted_by_eci = None
    self.bra_id = bra_id
    # Latest complete year for this location (month == 12 rows present).
    self.max_year_query = db.session.query(func.max(Ymb.year)).filter_by(
        bra_id=self.bra_id, month=12)
    # month == 0 is the full-year aggregate; length 5 keeps only
    # full-depth location codes.
    self.secex_query = Ymb.query.filter(Ymb.year == self.max_year_query,
                                        Ymb.month == 0,
                                        func.length(Ymb.bra_id) == 5)
def add_nodes_to_db(structure, boiler_id):
    """Persist a boiler's node tree (block > child > child > measurement
    points), creating a default Norm row for every measurement point.

    Ids are assigned manually, continuing after the current max Node.id.
    """
    last_id = db.session.query(func.max(Node.id)).first()[0]
    # BUG FIX: identity comparison with None must use `is None`,
    # not `== None` (PEP 8; `==` may be overloaded).
    if last_id is None:
        current_id = 1
    else:
        current_id = last_id + 1
    for block in structure:
        new_block = Node(boiler_id=boiler_id, index=block.get('index'),
                         node_name=block.get('node_name'), id=current_id)
        db.session.add(new_block)
        current_id += 1
        for child_1 in block.get('children'):
            new_child_1 = Node(boiler_id=boiler_id, parent_id=new_block.id,
                               index=child_1.get('index'),
                               node_name=child_1.get('node_name'),
                               id=current_id)
            db.session.add(new_child_1)
            current_id += 1
            for child_2 in child_1.get('children'):
                new_child_2 = Node(boiler_id=boiler_id,
                                   parent_id=new_child_1.id,
                                   index=child_2.get('index'),
                                   node_name=child_2.get('node_name'),
                                   id=current_id)
                db.session.add(new_child_2)
                current_id += 1
                # Leaf level: one Node + default Norm per (element, point).
                elements = int(child_2.get("Elements"))
                points = int(child_2.get("Points"))
                for element in range(1, elements + 1):
                    for point in range(1, points + 1):
                        new_point = Node(
                            boiler_id=boiler_id,
                            parent_id=new_child_2.id,
                            index=point,
                            node_name='Element ' + str(element) + ' Point ' + str(point),
                            id=current_id)
                        db.session.add(new_point)
                        # Default thresholds for a fresh measurement point.
                        new_point_norm = Norm(node_id=current_id, default=6.5,
                                              minor=6.0, major=5.2, defect=4.5)
                        db.session.add(new_point_norm)
                        current_id += 1
    db.session.commit()
def search_to_shelf(shelf_id):
    """Add the current user's last search results to the given shelf,
    skipping books already on it, then redirect to the index.
    """
    shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.id == shelf_id).first()
    if shelf is None:
        log.error("Invalid shelf specified: %s", shelf_id)
        flash(_(u"Invalid shelf specified"), category="error")
        return redirect(url_for('web.index'))
    if not check_shelf_edit_permissions(shelf):
        # NOTE(review): "the the" typo is the live gettext msgid; fixing it
        # would orphan existing translations.
        flash(_(u"You are not allowed to add a book to the the shelf: %(name)s", name=shelf.name), category="error")
        return redirect(url_for('web.index'))
    if current_user.id in searched_ids and searched_ids[current_user.id]:
        books_for_shelf = list()
        books_in_shelf = ub.session.query(
            ub.BookShelf).filter(ub.BookShelf.shelf == shelf_id).all()
        if books_in_shelf:
            # Only queue search hits that are not already on the shelf.
            book_ids = list()
            for book_id in books_in_shelf:
                book_ids.append(book_id.book_id)
            for searchid in searched_ids[current_user.id]:
                if searchid not in book_ids:
                    books_for_shelf.append(searchid)
        else:
            books_for_shelf = searched_ids[current_user.id]
        if not books_for_shelf:
            log.error("Books are already part of %s", shelf)
            flash(_(u"Books are already part of the shelf: %(name)s", name=shelf.name), category="error")
            return redirect(url_for('web.index'))
        # Append after the current highest ordering value on the shelf.
        maxOrder = ub.session.query(func.max(ub.BookShelf.order)).filter(
            ub.BookShelf.shelf == shelf_id).first()
        if maxOrder[0] is None:
            maxOrder = 0
        else:
            maxOrder = maxOrder[0]
        for book in books_for_shelf:
            maxOrder = maxOrder + 1
            shelf.books.append(
                ub.BookShelf(shelf=shelf.id, book_id=book, order=maxOrder))
        shelf.last_modified = datetime.utcnow()
        ub.session.merge(shelf)
        ub.session.commit()
        flash(_(u"Books have been added to shelf: %(sname)s", sname=shelf.name),
              category="success")
    else:
        flash(_(u"Could not add books to shelf: %(sname)s", sname=shelf.name), category="error")
    return redirect(url_for('web.index'))
def get_recs():
    """GET: return the company's recommendations as JSON.
    POST: (re)compute recommendations per question section, upserting the
    Recommendations rows, then return the refreshed list as JSON.
    """
    # Get company_id from cookie
    key = current_app.config['SECRET_KEY']
    serializer = URLSafeTimedSerializer(secret_key=key)
    payload = request.cookies.get('CheckMateCookie')
    cookie_contents = serializer.loads(payload)
    c_id_from_cookie = int(cookie_contents['company_id'])
    if request.method == 'GET':
        recs = Recommendations.query.filter_by(company_id=c_id_from_cookie).order_by(Recommendations.section).all()
        return jsonify([r.to_dict() for r in recs])
    if request.method == 'POST':
        # Get distinct section numbers from Questions table
        distinct_sec_nums = [s[0] for s in db.session.query(Questions.section).distinct()]
        # Max number of rectext entries — sections beyond it have no
        # recommendation text and are skipped.
        max_recs = db.session.query(func.max(RecText.section)).one()
        # Loop through each section
        for sec in distinct_sec_nums:
            # Get section name
            q = Questions.query.filter(Questions.section == sec).first()
            q_dict = q.to_dict()
            sec_name = q_dict['section_name']
            if sec <= int(max_recs[0]):
                resp_text, complete = rec_logic(c_id_from_cookie, sec)
                # If rec exists, update it.
                if (Recommendations.query.filter(Recommendations.company_id == c_id_from_cookie)
                        .filter(Recommendations.section == sec).count() > 0):
                    rec = Recommendations.query.filter(Recommendations.company_id == c_id_from_cookie)\
                        .filter(Recommendations.section == sec).first()
                    rec.section_name = sec_name
                    rec.rec_text = resp_text
                    rec.completed = complete
                    # Add session
                    db.session.add(rec)
                else:
                    # Add to recommendations table
                    rcs = Recommendations(
                        company_id=c_id_from_cookie,
                        section=sec,
                        section_name=sec_name,
                        rec_text=resp_text,
                        flagged=0,
                        completed=complete
                    )
                    # Add session
                    db.session.add(rcs)
            # Commit session (once per section)
            db.session.commit()
        recs = Recommendations.query.filter(Recommendations.company_id == c_id_from_cookie).order_by(Recommendations.section).all()
        return jsonify([r.to_dict() for r in recs])
def get_next_doc_num(self):
    """Return the next free document number for HardwareUse.

    :return: max(doc_num) + 1, or 1 when the table is empty.
    """
    current_max = self.db_session.query(func.max(HardwareUse.doc_num)).scalar()
    # An empty table yields None; treat any falsy result as "start at 1".
    return (current_max or 0) + 1
def get_result_num(results):
    """Return the next result number for the labware test table.

    CALLED BY: update_labware(), update_increment()
    RETURNS: int -- max(result_number) + 1, or 1 when the table is empty.
    """
    # FunctionElement.scalar() executes the aggregate against the table's
    # bound engine (legacy SQLAlchemy API) and yields None for no rows.
    current_max = (func.max(results.c.result_number)).scalar()
    # `is None` replaces the non-idiomatic `== None` comparison.
    if current_max is None:
        return 1
    return current_max + 1
def __get_latest_comment_id(self, news_id):
    """Return the highest comment_id stored for *news_id*, or 0 if none."""
    session = self.dbUtils.Session()
    try:
        res = session.query(func.max(
            Comments.comment_id)).filter(Comments.news_id == news_id)
        # max() returns one row whose single value is None when there are
        # no matching comments; guard every level before comparing.
        return res[0][0] if res[0] and res[0][0] and res[0][0] > 0 else 0
    except Exception:
        # Bare `raise` re-raises with the full original traceback;
        # `raise ex` rebuilt the traceback from the except handler.
        raise
    finally:
        session.close()
def get_max_tweet_id(session, username):
    """Return the largest tweet_id stored for *username*, or None if absent."""
    row = session.query(func.max(Tweets.tweet_id).label("max")).filter(
        Tweets.username == username).first()
    # .first() may return None; otherwise the labeled aggregate is row.max.
    return row.max if row else None
def home():
    """Render the index page featuring the open resort with the most
    24-hour snowfall from the most recent scrape."""
    newest_scrape = session.query(func.max(SkiResort.scrape_ts))
    # newest_scrape is used as a correlated subquery inside the filter.
    top_resort = (session.query(SkiResort)
                  .order_by(SkiResort.inches_24_hr.desc())
                  .filter(and_(SkiResort.scrape_ts == newest_scrape,
                               SkiResort.open_status == True))
                  .first())
    # Return template and data
    return render_template("index.html", best_resort=top_resort)
def new_requester_id():
    """Allocate, persist, and return the next Requester id.

    NOTE(review): max(id)+1 is racy under concurrent callers -- two
    sessions can pick the same id; a DB autoincrement column would be
    safer.  Behavior kept for compatibility.
    """
    session = db_session()
    try:
        # (max or 0) + 1 handles the empty-table case in one expression.
        new_id = (session.query(func.max(Requester.id)).scalar() or 0) + 1
        session.add(Requester(id=new_id))
        session.commit()
        return new_id
    finally:
        # Close even on failure; the original leaked the session when the
        # query or commit raised.  Also avoids shadowing builtin `id`.
        session.close()
def set_last_record(self, evt=None):
    """Remember the id of the most recently processed event.

    If *evt* is given, its 'id' event attribute value is used; otherwise
    the maximum id currently in the tool's event_streams table is queried.
    """
    if evt:
        self._last_processed_id = evt.event_attrs['id'].value
    else:
        try:
            # NOTE(review): with a standard SQLAlchemy Query, .one()
            # returns a row tuple, so _last_processed_id would be a
            # 1-tuple rather than a bare id, and an empty result raises
            # NoResultFound, not IndexError.  _tool.query may differ --
            # confirm its contract before changing this.
            self._last_processed_id = self._tool.query(
                func.max(self._tool.event_streams.id)).one()
        except IndexError:
            # No events yet, so do nothing
            pass
def __init__(self, product_id, bra_id):
    """Prepare (but do not execute) SECEX queries for *product_id* in
    location *bra_id*.

    max_year_query finds the latest year that has a month == 12 row
    (presumably marking a complete year -- confirm against the schema);
    secex_query selects that year's month == 0 row (presumably the
    annual aggregate).
    """
    self._secex = None  # lazy cache for query results, filled elsewhere
    self.bra_id = bra_id
    self.product_id = product_id
    # Correlated subquery: latest complete year for this product/location.
    self.max_year_query = db.session.query(func.max(Ymbp.year)).filter_by(
        hs_id=product_id, bra_id=bra_id, month=12)
    self.secex_query = Ymbp.query.filter(Ymbp.hs_id == self.product_id,
                                         Ymbp.bra_id == self.bra_id,
                                         Ymbp.month == 0,
                                         Ymbp.year == self.max_year_query)
async def get_latest_by_external_id(self):
    """Return the serialized row holding the highest external_id, or None."""
    latest_id = self.db.query(func.max(self.table.external_id)).scalar()
    # Guard clause: empty table (or falsy max) yields an implicit None.
    if not latest_id:
        return
    row = (self.db.query(self.table)
           .filter(self.table.external_id == latest_id)
           .scalar())
    return self.serialize(row)
def vrati_drzave():
    """Return all countries (GET) or create a new one (POST).

    POST payload requires 'naziv' and 'id_kontinent'; 'opis' is optional.
    Responds 400 on missing/invalid parameters or duplicate naziv,
    404 when no countries exist, 405 for other methods.
    """
    if flask.request.method == 'GET':
        drzava_schema = DrzavaSchema(many=True)
        data = Drzava.query.all()
        if data:
            result = drzava_schema.dump(data)
            return jsonify(result)
        else:
            return make_response(jsonify({'error': 'no Drzava found'})), 404
    elif flask.request.method == 'POST':
        drzava_schema = DrzavaSchema()
        payload = flask.request.get_json()
        for req in ['naziv', 'id_kontinent']:
            if req not in payload:
                return make_response(
                    jsonify({'error': 'Required parameters missing'})), 400
        ima = Drzava.query.filter(Drzava.naziv == payload['naziv']).first()
        if not ima:
            # (max or 0) + 1 also works on an empty table; the original
            # `scalar()` returned None there and `+= 1` raised TypeError.
            new_id = (db.session.query(func.max(Drzava.id)).scalar() or 0) + 1
            naziv = payload['naziv']
            data = Kontinent.query.filter(
                Kontinent.id == payload['id_kontinent']).first()
            if data:
                id_kontinent = payload['id_kontinent']
            else:
                return make_response(
                    jsonify({'error': 'Non-existing id_kontinent'})), 400
            opis = payload.get('opis')  # optional field
            drzava = Drzava(id=new_id, naziv=naziv, opis=opis,
                            id_kontinent=id_kontinent)
            db.session.add(drzava)
            db.session.commit()
            result = drzava_schema.dump(drzava)
            return make_response(jsonify(result)), 200
        else:
            return make_response(
                jsonify({'error': 'Drzava with that naziv already exists'})), 400
    else:
        err = flask.request.method + ' request not allowed'
        return make_response(jsonify({'error': err})), 405
def __init__(self, db, table, chunksize=1000, skip_columns=()):
    """Daemon thread that reads *table* in chunks via a bounded queue.

    :param db: handle exposing ``session()`` for queries
    :param table: SQLAlchemy Table with an 'id' column
    :param chunksize: rows fetched per batch
    :param skip_columns: column names excluded from processing
    """
    threading.Thread.__init__(self)
    self.daemon = True
    self.db = db
    self.table = table
    self.chunksize = chunksize
    self.skip_columns = skip_columns
    # Small bound keeps memory use flat between producer and consumer.
    self.queue = queue.Queue(maxsize=5)
    # .scalar() replaces .all()[0][0]: same single aggregate value
    # without materializing an intermediate result list.
    self.max_id = db.session().query(func.max(table.columns['id'])).scalar() or 0
    # NOTE(review): starting the thread inside __init__ means run() can
    # execute before any subclass finishes initializing.
    self.start()
def _get_next_id(db, entity) -> int:
    """Return the next free id for *entity*'s model (max + 1).

    Aisle/AisleContains rows are keyed on aisle_name instead of id.
    An empty table now counts as max == 0, so the first id issued is 1
    (the original raised TypeError via `None + 1` on an empty table).
    """
    model = type(entity)
    session = db.session
    session.expire_on_commit = False
    try:
        # NOTE(review): this assumes aisle_name holds numeric values so
        # that max(...) + 1 is meaningful -- confirm against the schema.
        if model is Aisle or model is AisleContains:
            row = session.query(func.max(model.aisle_name)).one_or_none()
        else:
            row = session.query(func.max(model.id)).one_or_none()
        next_id = ((row[0] if row else None) or 0) + 1
    except Exception as e:
        # Narrowed from BaseException so KeyboardInterrupt/SystemExit
        # propagate untouched; real errors are still logged and re-raised.
        tb = sys.exc_info()
        db.app.logger.info(e.with_traceback(tb[2]))
        raise
    return next_id
def fetch_refresh_date():
    """Return the latest IndexPrices date formatted like 'March 4, 2021',
    or '' when no date is stored."""
    latest = db.session.query(func.max(IndexPrices.date)).one()[0]
    if not latest:
        return ""
    # subtract a day because of the UTC offset
    adjusted = latest - timedelta(days=1)
    return datetime.strftime(adjusted, '%B %-d, %Y')
def index():
    """Landing page: render summary statistics, each cached for one hour."""
    getters = (
        ('max_addon', lambda: db.session.query(func.max(AddonModel.addon_id)).scalar()),
        ('max_file', lambda: db.session.query(func.max(FileModel.file_id)).scalar()),
        ('num_addons', lambda: db.session.query(AddonModel).count()),
        ('num_files', lambda: db.session.query(FileModel).count()),
        ('num_authors', lambda: db.session.query(AuthorModel).count()),
        ('num_history', lambda: db.session.query(HistoricDayRecord).count()),
    )
    # Each getter only runs on a cache miss; 60 * 60 is the TTL in seconds.
    stats = {}
    for stat_key, getter in getters:
        stats[stat_key] = get_or_create_cached_value('index', stat_key, getter, 60 * 60)
    return flask.render_template('index.html', **stats)
def change_deploy_branch(self, value):
    """Fill value['items'] with up to 20 branch names for this package,
    newest branch (by its latest Package.ctime) first.

    :param value: dict-like widget config; mutated in place and returned.
    """
    # Subquery: one row per branch with that branch's most recent ctime.
    subq = Package.query.with_entities(Package.branch.label("branch"),
                                       func.max(Package.ctime).label("max_ctime"))\
        .filter_by(name=self.package_name)\
        .group_by(Package.branch).subquery()
    # NOTE(review): Package.subquery(...) is a project-specific helper --
    # presumably it selects the given column from subq; confirm its
    # contract before restructuring this query.
    branches = [
        i[0] for i in Package.subquery(subq.c.branch).order_by(
            subq.c.max_ctime.desc()).limit(20).all()
    ]
    value['items'] = make_items(branches)
    return value
def getLastRunNumber(self, session, experiment_id):
    """Return the highest run_number recorded for *experiment_id*,
    or 0 when the experiment has no trials yet.

    Lazily runs self._setup() on first use.
    """
    if not self.initialized:
        self._setup()
    max_trial = session.query(func.max(Trial.run_number)) \
        .filter(Trial.experiment_id == experiment_id) \
        .first()
    max_run_number = max_trial[0]
    # `is None` replaces the non-idiomatic `== None` comparison; max()
    # yields a row containing None when no trials match.
    if max_run_number is None:
        return 0
    return max_run_number
def jurors(self):
    """Return a Juror for every distinct staker address in this court
    and all of its child courts."""
    court_ids = [self.id] + list(self.children_ids())
    # One row per address: the latest staking_date within the subtree.
    juror_data = db.session.query(JurorStake.address,
                                  func.max(JurorStake.staking_date)) \
        .filter(JurorStake.court_id.in_(court_ids)) \
        .group_by(JurorStake.address).all()
    return [Juror(row[0]) for row in juror_data]