def get_content(self):
    """Return the text of the post or comment this record points at."""
    if self.type == 3:
        record = db.query(m.Post).filter(m.Post.id == self.post_id).first()
    else:
        record = db.query(m.Comment).filter(m.Comment.id == self.comment_id).first()
    return record.content
def txt2db(diretorio):
    """Recreate CAGED.CLASSE20 and load it from CLASSE20.txt.

    :param diretorio: directory containing CLASSE20.txt (latin-1, tab-separated)
    """
    # Drop and recreate the target table from scratch.
    db.commit()
    db.query("DROP TABLE IF EXISTS CAGED.CLASSE20")
    db.query("""CREATE TABLE CAGED.CLASSE20(
        CO_CLASSE20 INT,
        CLASSE VARCHAR(200)
    )""")
    first_exec = True
    # `with` guarantees the file handle is closed even if a row fails to
    # parse (the original leaked the handle on any exception).
    with codecs.open(diretorio + "/CLASSE20.txt", "r", "latin-1") as arquivo:
        for linha in arquivo:
            dados = linha.split('\t')
            dic = {
                "co_classe20": int(dados[0]),
                "classe": dados[1].strip("\n"),
            }
            db.latin2utf(dic)
            if first_exec:
                # Prepare the insert statement once, from the first row's keys.
                db.prepareInsert("CLASSE20", "CAGED.CLASSE20", dic)
                db.commit()
                first_exec = False
            db.usePreparedInsert("CLASSE20", dic)
            db.commit()
def txt2db(diretorio):
    """Recreate CAGED.MUNICIPIO and load it from municipio.txt.

    :param diretorio: directory containing municipio.txt (latin-1, tab-separated)
    """
    # Drop and recreate the target table from scratch.
    db.commit()
    db.query("DROP TABLE IF EXISTS CAGED.MUNICIPIO")
    db.query("""CREATE TABLE CAGED.MUNICIPIO(
        CO_MUNICIPIO INT,
        UF CHAR(2),
        MUNICIPIO VARCHAR(40)
    )""")
    first_exec = True
    # `with` guarantees the file handle is closed even if a row fails to
    # parse (the original leaked the handle on any exception).
    with codecs.open(diretorio + "/municipio.txt", "r", "latin-1") as arquivo:
        for linha in arquivo:
            dados = linha.split('\t')
            dic = {
                "co_municipio": int(dados[0]),
                "uf": dados[1],
                "municipio": dados[2].strip("\n"),
            }
            db.latin2utf(dic)
            if first_exec:
                # Prepare the insert statement once, from the first row's keys.
                db.prepareInsert("MUNICIPIO", "CAGED.MUNICIPIO", dic)
                db.commit()
                first_exec = False
            db.usePreparedInsert("MUNICIPIO", dic)
            db.commit()
def show_places(db): from sqlalchemy.orm import joinedload # this line preloads all the places into the cache places = db.query(m.Place).options( joinedload(m.Place.children), joinedload(m.Place.rooms) .load_only(m.Room.id) .subqueryload(m.Room.stats) .load_only(m.RoomStats.adjusted_rating), joinedload(m.Place.rooms) .subqueryload(m.Room.listing_for) .joinedload(m.RoomListing.occupancies) .load_only(m.Occupancy.cancelled), joinedload(m.Place.rooms) .subqueryload(m.Room.listing_for) .subqueryload(m.RoomListing.audience_types) ).all() # which this line then accesses root = db.query(m.Place).get(1) return template('places', location=root, ballot=get_ballot(db))
def get_followeders(self):
    """Return User objects for every Follower row with who_id == self.id,
    ordered by newest follow first.
    """
    whoms = (
        db.query(m.Follower)
        .order_by(sa.desc(m.Follower.created_at))
        .filter(m.Follower.who_id == self.id)
        .all()
    )
    # Comprehension instead of `+= [x]` loop.  Still one query per row
    # (N+1); kept as-is to preserve behavior.
    return [db.query(User).filter(User.id == whom.whom_id).first()
            for whom in whoms]
def get(self, pid, id):
    """Render the reply-edit page for reply `id`; 404 when it does not exist."""
    self.checkAdmin()
    reply = db.query(Reply).get(id)
    if reply is None:
        raise tornado.web.HTTPError(404)
    post = db.query(Post).get(reply.pid)
    self.render("replyedit.html", reply=reply, post=post)
def post(self, post_id):
    """Create a comment on a post; notify the author and any @-mentions."""
    user = self.current_user
    post_id = int(post_id)
    post = db.query(Post).get(post_id)
    if not post:
        raise tornado.web.HTTPError(404)
    origin_content = self.get_argument("comment-content", "")
    content = md(origin_content)
    if origin_content == "":
        self.redirect(self.next_url)
        return
    comment = Comment(post_id=post_id, user_id=user.id, content=content,
                      origin_content=origin_content)
    db.add(comment)
    db.commit()
    if self.is_ajax():
        self.write(
            tornado.escape.json_encode(
                {
                    "username": user.name,
                    "avatar": user.get_avatar(size=24),
                    "time": formatDate(int(time.time())),
                    "content": content,
                }
            )
        )
    if post.user_id != user.id:
        # Fix: use the freshly committed comment's id directly.  The old
        # code re-queried the user's newest comment, which is race-prone
        # when the same user posts two comments concurrently.
        db.add(Notifier(post_id=post_id, who_id=user.id,
                        whom_id=post.user_id, comment_id=comment.id))
        db.commit()
    if content.find("@") != -1:
        put_notifier(comment=comment, post=post)
def get(self, page=1):
    """Render one page of the archive listing."""
    page = int(page)
    per_page = config.archive_paged
    total = db.query(Post).count()
    page_count = (total + per_page - 1) // per_page
    posts = (
        db.query(Post)
        .order_by(sa.desc(Post.created_date))
        .offset((page - 1) * per_page)
        .limit(per_page)
    )
    self.render("archive.html", posts=posts, formatDate2=formatDate2,
                page=page, page_count=page_count)
def get_last_ballot(db):
    """Get the ballot that was run to allocate the current students"""
    from datetime import date
    today = date.today()
    # January-August still belongs to the season started the previous year.
    season_year = today.year - 1 if today.month <= 8 else today.year
    return db.query(m.BallotSeason).get(season_year)
def get(self, page=1):
    """Render one page of the home listing."""
    page = int(page)
    per_page = config.paged
    total = db.query(Post).count()
    page_count = (total + per_page - 1) // per_page
    posts = (
        db.query(Post)
        .order_by(sa.desc(Post.created_date))
        .offset((page - 1) * per_page)
        .limit(per_page)
    )
    self.render("home.html", posts=posts, getDay=getDay, getMonth=getMonth,
                getAvatar=getAvatar, replyContent=replyContent,
                formatDate=formatDate, formatDate2=formatDate2,
                showPost=showPost, page=page, page_count=page_count)
def favorites(self, page=1):
    """Return the posts this user has favorited, newest favorite first.

    NOTE(review): `page` is accepted but ignored — every favorite is
    returned.  get_favorites() is the paginated variant; confirm whether
    callers rely on the unpaginated behavior before unifying.
    """
    favorites = (
        db.query(m.Favorite)
        .order_by(sa.desc(m.Favorite.created_at))
        .filter(m.Favorite.user_id == self.id)
        .all()
    )
    # Comprehension instead of `+= [x]` loop; still N+1 queries, kept as-is.
    return [db.query(m.Post).filter(m.Post.id == fav.post_id).first()
            for fav in favorites]
def get_favorites(self, page=1):
    """Return one page of posts this user has favorited, newest first.

    :param page: 1-based page number, config.paged items per page
    """
    favorites = (
        db.query(m.Favorite)
        .order_by(sa.desc(m.Favorite.created_at))
        .filter(m.Favorite.user_id == self.id)
        .offset((page - 1) * config.paged)
        .limit(config.paged)
        .all()
    )
    # Comprehension instead of `+= [x]` loop; still N+1 queries, kept as-is.
    return [db.query(m.Post).filter(m.Post.id == fav.post_id).first()
            for fav in favorites]
def get_unread_count(hi, user):
    """Count posts created after the "new read" timestamp snapshot.

    Bug fix: `nrtimesnap` was assigned only inside the `if user:` branch
    but also read in the `else` branch, raising NameError for anonymous
    users.  The snapshot is now fetched unconditionally.
    """
    nrtimesnap = hi.get_nrtimesnap()
    if user:
        followeder_ids = user.get_followeder_ids()
        count = (
            db.query(Post)
            .order_by(sa.desc(Post.created_at))
            .filter(sa.and_(Post.user_id.in_(followeder_ids),
                            Post.created_at > nrtimesnap,
                            Post.user_id != user.id))
            .count()
        )
    else:
        count = (
            db.query(Post)
            .order_by(sa.desc(Post.created_at))
            .filter(Post.created_at > nrtimesnap)
            .count()
        )
    return count
def list_archive(page=1):
    """Render one page of the archive listing; a bad page value maps to 1."""
    try:
        page = int(page)
    except ValueError:
        page = 1
    per_page = config.archive_paged
    total = db.query(Post).count()
    page_count = (total + per_page - 1) // per_page
    posts = (
        db.query(Post)
        .order_by(sa.desc(Post.created_date))
        .offset((page - 1) * per_page)
        .limit(per_page)
    )
    return render_template("archive.html", posts=posts,
                           formatDate2=formatDate2, page=page,
                           page_count=page_count, getAvatar=getAvatar)
def get_posts_and_retweets(self, page=1):
    """Return this user's own posts plus the posts they have retweeted.

    NOTE(review): `page` is accepted but unused — the full list is returned
    (the slicing code was already commented out in the original).
    """
    retweet_relationships = (
        db.query(m.Retweet).filter(m.Retweet.user_id == self.id).all()
    )
    # Comprehension replaces the append loop; the `!= []` guard was
    # redundant (iterating an empty list is a no-op).
    retweet_post_ids = [r.post_id for r in retweet_relationships]
    retweets = db.query(m.Post).filter(m.Post.id.in_(retweet_post_ids)).all()
    posts = db.query(m.Post).filter(m.Post.user_id == self.id).all() + retweets
    return posts
def carrega(arquivo):
    """Ensure the CAGED schema and auxiliary tables exist, then load `arquivo`."""
    db.query("CREATE SCHEMA IF NOT EXISTS CAGED")
    # Load the auxiliary lookup tables only when some are missing.
    if len(db.buscaTabelas("CAGED")) < 5:
        base = os.path.dirname(os.path.realpath(__file__))
        cbo2002.txt2db(base)
        classe10.txt2db(base)
        classe20.txt2db(base)
        municipio.txt2db(base)
        subclasse.txt2db(base)
    dados.txt2db(arquivo)
def delete(query=None):
    """Delete Users rows matching `query` (filter_by keyword filters).

    :param query: dict of column=value filters
    :raises Exception: wrapping the original database error
    """
    try:
        db.query(Users).\
            filter_by(**query).\
            delete()
        db.commit()
    except Exception as e:
        db.rollback()
        # Fix: `e.message` does not exist on Python 3 exceptions.  Use
        # str(e) and chain with `from e` to keep the original traceback.
        raise Exception(str(e)) from e
def home(page):
    """Render one page of the home listing plus the six newest replies."""
    try:
        page = int(page)
    except ValueError:
        page = 1
    per_page = config.paged
    total = db.query(Post).count()
    page_count = (total + per_page - 1) // per_page
    posts = (
        db.query(Post)
        .order_by(sa.desc(Post.created_date))
        .offset((page - 1) * per_page)
        .limit(per_page)
    )
    recent_replys = db.query(Reply).order_by(sa.desc(Reply.created_date)).limit(6)
    return render_template("home.html", posts=posts, getDay=getDay,
                           getMonth=getMonth, getAvatar=getAvatar,
                           replyContent=replyContent, formatDate=formatDate,
                           formatDate2=formatDate2, showPost=showPost,
                           page=page, page_count=page_count,
                           recent_replys=recent_replys)
def show_occupancies(db):
    """List occupancies having at least one non-newest review, as HTML links."""
    stale = db.query(m.Occupancy).join(m.Review).filter(~m.Review.is_newest)
    links = [
        '<a href="/occupancies/{0}">#{0} ({1})</a>'.format(o.id, len(o.reviews))
        for o in stale
    ]
    return '<br>'.join(links)
def show_occupancy(occ_id, db):
    """Render the page for one occupancy; 404 when the id is unknown."""
    query = db.query(m.Occupancy).filter(m.Occupancy.id == occ_id)
    try:
        occupancy = query.one()
    except NoResultFound:
        raise HTTPError(404, 'Occupancy not found')
    return template('occupancy', occupancy=occupancy)
def get(self):
    """Serve the Atom feed with the twelve newest posts."""
    updated_time = int(time.time())
    posts = db.query(Post).order_by(sa.desc(Post.created_date)).limit(12)
    self.set_header("Content-Type", "application/atom+xml")
    self.render("feed.xml", posts=posts, formatDate2=formatDate2,
                updated_time=updated_time, config=config)
def add_reply(pid):
    """Handle a reply form submission for post `pid`, then redirect back.

    Removed the dead commented-out Tornado-style argument parsing that
    predated the Flask port.
    """
    name = request.form["reply[name]"]
    email = request.form["reply[email]"]
    website = request.form["reply[website]"]
    origin_content = request.form["reply[content]"]
    content = markdown.markdown(formatText(origin_content))
    # Validate required fields, bouncing back with an error message.
    if name == "":
        return redirect("/post/%d" % int(pid), error=u"请填入名字")
    if email == "":
        return redirect("/post/%d" % int(pid), error=u"请填入邮箱地址")
    if origin_content == "":
        return redirect("/post/%d" % int(pid), error=u"请输入评论内容")
    # Replies are numbered sequentially within a post.
    number = db.query(Reply).filter(Reply.pid == pid).count() + 1
    db.add(
        Reply(
            pid=int(pid),
            name=name,
            email=email,
            website=website,
            content=content,
            origin_content=origin_content,
            number=number,
        )
    )
    db.commit()
    # Remember the commenter's details for the next visit.
    base.replyerSet(name, email, website)
    return redirect("/post/%d" % (int(pid)))
def get_links():
    """Return coach→student edges as d3-style {source, target} dicts.

    d3 links are zero-indexed while SQLite ids start at 1, so subtract 1
    from each id here.  Alternatives: start SQLite indexing at 0
    (http://stackoverflow.com/q/692856) or search nodes by ID in d3
    (http://stackoverflow.com/q/23986466).  This controlled-data hack
    would break if nodes were ever deleted.
    """
    connections = db.query(Coachingconnection).all()
    return [
        {'source': conn.coach_id - 1, 'target': conn.student_id - 1}
        for conn in connections
    ]
def get_timeline(self, timesnap, page):
    """Return one page of posts, newest first, created before `timesnap`
    by this user or anyone they follow.

    :param timesnap: unix-time upper bound on Post.created_at
    :param page: 1-based page number, config.paged items per page

    (Two large blocks of dead, commented-out code were removed — including
    a Python-2-only ``list.sort(lambda ..., cmp(...))`` call.)
    """
    followeder_ids = self.get_followeder_ids()
    followeder_ids.append(self.id)  # include the user's own posts
    posts = (
        db.query(m.Post)
        .order_by(sa.desc(m.Post.created_at))
        .filter(sa.and_(m.Post.user_id.in_(followeder_ids),
                        m.Post.created_at < timesnap))
        .offset((page - 1) * config.paged)
        .limit(config.paged)
        .all()
    )
    return posts
def update(query=None, new_user=None):
    """Update Users rows matching `query` with the values in `new_user`.

    :param query: dict of column=value filters
    :param new_user: dict of column=value assignments
    :raises Exception: wrapping the original database error
    """
    try:
        db.query(Users) \
            .filter_by(**query) \
            .update(new_user)
        db.commit()
    except Exception as e:
        db.rollback()
        # Fix: `e.message` does not exist on Python 3 exceptions.  Use
        # str(e) and chain with `from e` to keep the original traceback.
        raise Exception(str(e)) from e
def update(query=None, updated_value=None):
    """Update Accounts rows matching `query` with the values in `updated_value`.

    :param query: dict of column=value filters
    :param updated_value: dict of column=value assignments
    :raises Exception: wrapping the original database error
    """
    try:
        db.query(Accounts) \
            .filter_by(**query) \
            .update(updated_value)
        db.commit()
    except Exception as e:
        db.rollback()
        # Fix: `e.message` does not exist on Python 3 exceptions.  Use
        # str(e) and chain with `from e` to keep the original traceback.
        raise Exception(str(e)) from e
def get_submissions_from_site(self, limit=None, store_in_db=True):
    """Fetch new submissions from the subreddit, stopping at the newest
    one already stored; optionally persist them to the database."""
    subreddit = self.session.get_subreddit(self.subreddit)
    # The newest previously-seen submission is the stopping point.
    newest_known = (
        db.query(Submission)
        .filter_by(subreddit=self.subreddit)
        .order_by(Submission.date.desc())
        .first()
    )
    collected = []
    seen_ids = set()
    for raw in subreddit.get_new(limit=limit):
        submission = Submission(raw)
        if newest_known and (submission.id == newest_known.id
                             or submission.date <= newest_known.date):
            break
        # The feed occasionally contains duplicates - skip them.
        if submission.id in seen_ids:
            continue
        seen_ids.add(submission.id)
        collected.append(submission)
        if store_in_db:
            db.add(submission)
    if store_in_db:
        db.commit()
    return collected
def get_comments_from_site(self, limit=None, store_in_db=True):
    """Fetch new comments from the subreddit, stopping at the newest one
    already stored; optionally persist them to the database."""
    subreddit = self.session.get_subreddit(self.subreddit)
    # The newest previously-seen comment is the stopping point.
    newest_known = (
        db.query(Comment)
        .filter_by(subreddit=self.subreddit)
        .order_by(Comment.date.desc())
        .first()
    )
    collected = []
    seen_ids = set()
    for raw in subreddit.get_comments(limit=limit):
        comment = Comment(raw)
        if newest_known and (comment.id == newest_known.id
                             or comment.date <= newest_known.date):
            break
        # The feed occasionally contains duplicates - skip them.
        if comment.id in seen_ids:
            continue
        seen_ids.add(comment.id)
        collected.append(comment)
        if store_in_db:
            db.add(comment)
    if store_in_db:
        db.commit()
    return collected
def get_followeder_ids(self):
    """Return the `whom_id` of every Follower row where this user is
    `who_id`, ordered by newest follow first.
    """
    whoms = (
        db.query(m.Follower)
        .order_by(sa.desc(m.Follower.created_at))
        .filter(m.Follower.who_id == self.id)
        .all()
    )
    # Comprehension replaces the manual append loop.
    return [whom.whom_id for whom in whoms]
def save_new_photo_form(db):
    """Attach uploaded photos (with captions) to an occupancy owned by the
    requesting user, then redirect to the room's photos section.

    :raises HTTPError: 400 missing id, 404 unknown occupancy, 403 not resident
    """
    occ_id = request.forms.occupancy_id
    if occ_id is None:
        raise HTTPError(400)
    try:
        occupancy = db.query(m.Occupancy).filter(m.Occupancy.id == occ_id).one()
    except NoResultFound:
        raise HTTPError(404, "No such occupancy to review")
    if occupancy.resident != request.user:
        raise HTTPError(403, "You must have been a resident of a room to review it")
    uploads = request.files.getall('photo')
    captions = request.forms.getall('caption')
    # Fix: zip() returns an iterator on Python 3, which reversed() rejects
    # with TypeError — materialize the pairs first.
    for image_upload, caption in reversed(list(zip(uploads, captions))):
        photo = m.Photo.from_file(image_upload.file)
        photo.caption = caption
        photo.occupancy = occupancy
        db.add(photo)
    return redirect('/rooms/{}#photos'.format(occupancy.listing.room.id))
def get(self, page=1):
    """Render the timeline: the public stream for anonymous visitors, the
    personalized timeline for logged-in users, with AJAX paging."""
    page = int(page)
    user = self.current_user
    # Full page loads reset the timestamp snapshots; AJAX pages reuse them
    # so paging stays stable while new posts arrive.
    if not self.is_ajax():
        self.set_timesnap()
        self.set_nrtimesnap()
        timesnap = int(time.time())
    else:
        timesnap = self.get_timesnap()
    if not user:
        posts = (
            db.query(Post)
            .order_by(sa.desc(Post.created_at))
            .filter(Post.created_at < timesnap)
            .offset((page - 1) * config.paged)
            .limit(config.paged)
            .all()
        )
        self.render("site/start.html", posts=posts)
        return
    posts = user.get_timeline(timesnap, page)
    if self.is_ajax():
        self.render("site/ajaxpage.html", posts=posts, page=page)
    else:
        self.render("site/index.html", posts=posts, page=page)
def post(self, pid):
    """Create a reply on post `pid`, then redirect to it anchored at the
    new reply's number.
    """
    name = self.get_argument("reply[name]", default='')
    email = self.get_argument("reply[email]", default='')
    website = self.get_argument("reply[website]", default='')
    origin_content = self.get_argument("reply[content]", default='')
    content = format_text(md(format_text(origin_content)))
    # Bug fix: Tornado's redirect() does not abort the handler, so every
    # validation failure must `return` — otherwise the reply was saved
    # even when a required field was empty.
    if name == "":
        self.redirect("/post/%d" % int(pid), error=u"请填入名字")
        return
    if email == "":
        self.redirect("/post/%d" % int(pid), error=u"请填入邮箱地址")
        return
    if origin_content == "":
        self.redirect("/post/%d" % int(pid), error=u"请输入评论内容")
        return
    # Replies are numbered sequentially within a post.
    number = db.query(Reply).filter(Reply.pid == pid).count() + 1
    db.add(
        Reply(pid=int(pid), name=name, email=email, website=website,
              content=content, origin_content=origin_content, number=number))
    db.commit()
    self.set_replyer(name, email, website)
    self.redirect("/post/%d#%d" % (int(pid), int(number)))
def update_names(db):
    """Compare stored Person names against LDAP and render the differences."""
    import utils
    names = {}
    initial_names = {}
    people = db.query(m.Person).all()
    data_lookup = utils.lookup_ldap(p.crsid for p in people)
    for person in people:
        record = data_lookup.get(person.crsid)
        if record:
            if record[u'visibleName'] != person.name:
                # LDAP now reports a different name than we have stored.
                names[person] = record[u'visibleName']
                initial_names[person] = record.get(u'registeredName')
        else:
            # The crsid no longer resolves in LDAP at all.
            names[person] = None
    changed = set(names.keys())
    # crsids that didn't match
    users_unknown = {p for p in changed if names[p] is None}
    changed -= users_unknown
    # names that have gone back to the registered (initial) form
    users_reverted = {p for p in changed if names[p] == initial_names[p]}
    changed -= users_reverted
    return template('users-update',
                    users_unknown=users_unknown,
                    users=changed,
                    users_reverted=users_reverted,
                    names=names)
def post_comment_on(self):
    """Post a comment as this record's Slack user.

    Returns a payload dict on success, or False when building the comment
    fails.  The attempt is recorded (last_commented / num_comments) in
    both cases, exactly as before.

    Fixes: Python-2-only ``print x`` statements (SyntaxError on Py3) and
    deduplicates the two identical record-the-attempt paths.
    """
    account = (db.query(Account).filter_by(
        slack_user=self.slack_user).first())
    print(account.name)
    failed = False
    try:
        comment = self.build_comment()
    except Exception as e:
        print(e)
        failed = True
    # Record the attempt whether or not the comment was built.
    self.last_commented = datetime.now()
    self.num_comments += 1
    db.add(self)
    db.commit()
    if failed:
        return False
    return {
        'name': account.name,
        'comment': comment,
        'picture': account.special_class
    }
def book_room(room_id, db):
    """Attempt to book `room_id` for the current user, then redirect to the
    room page (which re-runs the check and shows any error).

    :raises HTTPError: 403 on a bad CSRF token, 404 for an unknown room

    Fix: removed the unused local ``from sqlalchemy.sql import exists``.
    """
    token = request.forms.crsf_token
    if not token or token != request.session.get('crsf_token'):
        raise HTTPError(403, "Bad CSRF token")
    try:
        room = db.query(m.Room).filter(m.Room.id == room_id).one()
    except NoResultFound:
        raise HTTPError(404, "No matching room")
    ballot_event = get_ballot(db)
    import roombooking
    try:
        roombooking.check_then_book(db, request.user, room, ballot_event)
    except roombooking.BookingError:
        pass
    # whatever happens, we redirect back to the room page, which
    # reevaluates the check and gives the error message to the user about
    # what went wrong
    return redirect(utils.url_for(room))
def read(query=None):
    """Return all ManageUsers rows matching the filter_by `query` dict."""
    matches = db.query(ManageUsers).filter_by(**query)
    return matches.all()
def get_all_champs():
    """Return every Champ record."""
    champs = db.query(Champ)
    return champs.all()
def change_champ(champ_id: int, new_champ: dict):
    """Apply the column values in `new_champ` to the Champ with id `champ_id`."""
    target = db.query(Champ).filter_by(id=champ_id)
    target.update(new_champ)
    db.commit()
def delete_champ(champ_id: int):
    """Delete the Champ with id `champ_id`; an unknown id is a no-op.

    Fix: Query.get() returns None for an unknown id, and the old
    unconditional ``db.delete(champ)`` then raised an unhelpful error.
    """
    champ = db.query(Champ).get(champ_id)
    if champ is not None:
        db.delete(champ)
        db.commit()
def get_notifiers(self):
    """Return every notification addressed to this user, newest first."""
    query = (
        db.query(m.Notifier)
        .order_by(sa.desc(m.Notifier.created_at))
        .filter(m.Notifier.whom_id == self.id)
    )
    return query.all()
def get(self):
    """Render up to nine random users (excluding the current one) as referrers."""
    user = self.current_user
    candidates = (
        db.query(User)
        .order_by(func.rand())
        .filter(User.id != user.id)
        .limit(9)
        .all()
    )
    self.render("user/referrers.html", users=candidates)
def update(query=None, new_manage_user=None):
    """Update ManageUsers rows matching `query` with the values in
    `new_manage_user`."""
    matches = db.query(ManageUsers).filter_by(**query)
    matches.update(new_manage_user)
    db.commit()
def delete():
    """Admin endpoint: delete an entity and every record referencing it.

    Security fix: the entity id comes straight from the query string and
    was previously concatenated into raw SQL (SQL injection).  The raw
    statements now use bound parameters instead.
    """
    app.secret_key = flask_session_secret_key
    method = request.form.get('_method')
    id = request.args.get('id')
    if method == 'DELETE':
        db.query(Collaboration) \
            .filter(or_(Collaboration.entity_id1 == id,
                        Collaboration.entity_id2 == id)) \
            .delete(synchronize_session='evaluate')
        db.query(Dataconnection) \
            .filter(or_(Dataconnection.giver_id == id,
                        Dataconnection.receiver_id == id)) \
            .delete(synchronize_session='evaluate')
        db.query(Employment) \
            .filter(or_(Employment.entity_id1 == id,
                        Employment.entity_id2 == id)) \
            .delete(synchronize_session='evaluate')
        db.query(Revenue).filter(Revenue.entity_id == id).delete(
            synchronize_session='evaluate')
        db.query(Expense).filter(Expense.entity_id == id).delete(
            synchronize_session='evaluate')
        db.query(Relation) \
            .filter(or_(Relation.entity_id1 == id,
                        Relation.entity_id2 == id)) \
            .delete(synchronize_session='evaluate')
        db.query(Fundingconnection) \
            .filter(or_(Fundingconnection.giver_id == id,
                        Fundingconnection.receiver_id == id)) \
            .delete(synchronize_session='evaluate')
        db.query(Edit).filter(Edit.entity_id == id).delete(
            synchronize_session='evaluate')
        # Parameterized raw SQL — never concatenate request data into SQL.
        db.execute("DELETE FROM location_table WHERE entity_id=:id",
                   {"id": id})
        db.execute("DELETE FROM category_table WHERE entity_id=:id",
                   {"id": id})
        db.execute("DELETE FROM keypeople_table WHERE entity_id=:id",
                   {"id": id})
        db.query(Entity).filter(Entity.id == id).delete(
            synchronize_session='evaluate')
        db.commit()
        cache.clear()
        flash("Delete was successful")
    return redirect('/admin')
def get_peers():
    """Return every known Peer."""
    peers = db.query(Peer)
    return peers.all()
def get_random_peers(limit=10):
    """Return up to `limit` peers in random database order."""
    shuffled = db.query(Peer).order_by(func.random())
    return shuffled.limit(limit)
def get_posts_amount(self):
    """Return how many posts this user has authored.

    Fix: count in SQL instead of loading every Post row just to call
    len() on the list (the ``if posts else 0`` guard was also redundant —
    len([]) is already 0).
    """
    return db.query(m.Post).filter(m.Post.user_id == self.id).count()
def get_posts(self, page=1):
    """Return one page of this user's posts, newest first."""
    query = (
        db.query(m.Post)
        .order_by(sa.desc(m.Post.created_at))
        .filter(m.Post.user_id == self.id)
        .offset((page - 1) * config.paged)
        .limit(config.paged)
    )
    return query.all()
def get_origin_post(self):
    """Return the original Post referenced by this record's post_id."""
    origin = db.query(Post).get(self.post_id)
    return origin
def delete(query=None):
    """Delete ManageUsers rows matching the filter_by `query` dict."""
    matches = db.query(ManageUsers).filter_by(**query)
    matches.delete()
    db.commit()
def retweeted_it(self, post):
    """Return this user's retweet of `post` (a type-2 Post), or None."""
    condition = sa.and_(
        m.Post.type == 2,
        m.Post.user_id == self.id,
        m.Post.post_id == post.id,
    )
    return db.query(m.Post).filter(condition).first()
def get_user_count():
    """Return the total number of User rows."""
    total = db.query(User).count()
    return total
def get_creator_id(product_id, default=None):
    """Return the creator_id of the matching product, or `default` when
    no such product exists."""
    product = db.query(Product).filter_by(product_id=product_id).first()
    return product.creator_id if product else default
def get_config(key, default=None):
    """Return the stored value for config `key`, or `default` when unset."""
    config = db.query(Config).filter_by(key=key).first()
    return config.value if config else default
def get_unread_notifiers(self):
    """Return this user's unread notifications (status == 0), newest first."""
    unread = sa.and_(m.Notifier.whom_id == self.id, m.Notifier.status == 0)
    query = (
        db.query(m.Notifier)
        .order_by(sa.desc(m.Notifier.created_at))
        .filter(unread)
    )
    return query.all()
def get_hostname(identifier):
    """Return the hostname of the peer with `identifier`, or None if unknown.

    Fix: .first() returns None for an unknown identifier, and the old
    unconditional attribute access raised AttributeError.
    """
    peer = db.query(Peer).filter_by(identifier=identifier).first()
    return peer.hostname if peer else None
def faved_it(self, post):
    """Return this user's Favorite row for `post`, or None."""
    condition = sa.and_(
        m.Favorite.user_id == self.id,
        m.Favorite.post_id == post.id,
    )
    return db.query(m.Favorite).filter(condition).first()
def check_peer(identifier='0', hostname='http://0.0.0.0:0000'):
    """Return True when a peer already exists with this identifier or hostname."""
    match = db.query(Peer).filter(
        or_(Peer.identifier == identifier, Peer.hostname == hostname)).first()
    # Return the boolean directly instead of the if/True/else/False ladder.
    return match is not None
def get_retweets(self):
    """Return every Post whose post_id points back at this post."""
    retweets = db.query(Post).filter(Post.post_id == self.id)
    return retweets.all()
def remove_product(product_id):
    """Delete every Product row with this product_id."""
    matches = db.query(Product).filter_by(product_id=product_id)
    matches.delete()
    db.commit()
def all() -> List[int]:
    """Return the channel id of every LogExclude row.

    NOTE: the name shadows the builtin ``all``; it is kept because callers
    use this public name.
    """
    return [entry.channel_id for entry in db.query(LogExclude)]