def update_book(id):
    """Update a book's id/title and replace its author links.

    Expects a JSON body with 'id', 'title' and 'authors_id' keys.
    Returns 400 on bad input, duplicate id, or unknown author.
    """
    book = Session.query(Book).filter(Book.id == id)
    if not request.json or not book.first():
        abort(400)
    # Remove the links attached to the book's *current* id; the previous
    # code keyed the delete on request.json['id'] (the new id), which left
    # stale link rows behind whenever the id was being changed.
    Session.query(AuthorBookLink).filter(
        AuthorBookLink.book_id == id).delete()
    authors = request.json['authors_id']
    try:
        book.update({
            'id': request.json['id'],
            'title': request.json['title'],
            'updated_at': datetime.now()
        })
    except IntegrityError:
        Session.rollback()
        return 'Id currently exists', 400
    for author_id in authors:
        # .first() is required here: a Query object is always truthy, so
        # the original `if not author` check could never fire.
        author = Session.query(Author).filter(Author.id == author_id).first()
        if not author:
            Session.rollback()
            # Explicit 400 — the original returned this message with a 200.
            return 'No author with id %i' % author_id, 400
        Session.add(
            AuthorBookLink(author_id=author_id, book_id=request.json['id']))
    Session.commit()
    return 'OK', 200
def delete_author(id):
    """Delete an author and every author-book link referencing it."""
    link_rows = Session.query(AuthorBookLink).filter(
        AuthorBookLink.author_id == id)
    author_rows = Session.query(Author).filter(Author.id == id)
    # Drop the link rows first so no orphaned references survive.
    link_rows.delete()
    author_rows.delete()
    Session.commit()
    return 'OK', 200
def update_author(id):
    """Update an author's id/name, re-pointing existing book links at the new id."""
    author = Session.query(Author).filter(Author.id == id)
    if not request.json or not author.first():
        abort(400)
    links = Session.query(AuthorBookLink).filter(
        AuthorBookLink.author_id == author.first().id)
    # Remember which books were attached before dropping the old links.
    connected_books_ids = [link.book_id for link in links]
    links.delete()
    try:
        author.update({
            'id': request.json['id'],
            'name': request.json['name'],
            'updated_at': datetime.now()
        })
    except IntegrityError:
        Session.rollback()
        return 'Id already exists', 400
    # Recreate the links under the (possibly new) author id.
    for book_id in connected_books_ids:
        Session.add(
            AuthorBookLink(author_id=request.json['id'], book_id=book_id))
    Session.commit()
    return 'OK', 200
def index():
    """Render the login page with the ten newest and ten most-played tracks."""
    if session.get('logged_in'):
        return redirect(url_for('main'))
    newest = Session.query(music).order_by(music.id.desc()).limit(10).all()
    top_played = Session.query(music).order_by(music.count.desc()).limit(10).all()
    return render_template('login.html', most=top_played, new=newest)
def addfav():
    """Append the requested track to the end of the user's first playlist."""
    data = request.get_json()
    fav_list = (Session.query(playlist)
                .filter(playlist.userid == session['userid'])
                .order_by(playlist.id)
                .first())
    row = (Session.query(func.max(playlist_item.order))
           .filter(playlist_item.listid == fav_list.id)
           .first())
    # max() comes back None on an empty playlist; start numbering at 1 then.
    last_order = row[0] or 0
    Session.add(playlist_item(listid=fav_list.id, musicid=data['id'],
                              order=last_order + 1))
    Session.commit()
    return "Success"
def albuminfo ():
    """Return album metadata plus its ordered track list as JSON."""
    data = request.get_json()
    record = Session.query(album).filter(album.id == data['id']).one()
    tracks = Session.query(music).filter(
        music.albumid == data['id']).order_by(music.num).all()
    json_data = dict(
        id=data['id'],
        name=record.name,
        singer=record.singer,
        url=record.name + '.jpg',
        musicnum=record.music_count,
    )
    json_data["musiclist"] = [m.diction() for m in tracks]
    return jsonify(json_data)
async def delete_user(user_id: int, db: Session = Depends(get_db),
                      current_user: User = Depends(get_current_active_user)):
    """Delete a user by id.

    Returns a success message, or a failure message when the record does
    not exist or the delete fails.
    """
    try:
        # Return the failure message directly (consistent with update_user)
        # instead of raising a generic Exception just to catch it below.
        if not db.query(UserModel).filter(UserModel.id == user_id).first():
            return failedMessage('Registro no existe')
        db.query(UserModel).filter(UserModel.id == user_id).delete()
        db.commit()
        res = successMessage()
    except Exception as e:
        # Broad catch kept deliberately: any DB failure becomes a failure
        # payload rather than a 500.
        return failedMessage(e)
    return res
def getlist():
    """Return a playlist's name and its ordered tracks as JSON."""
    data = request.get_json()
    items = Session.query(playlist_item).filter(
        playlist_item.listid == data['id']).order_by(playlist_item.order).all()
    json_data = dict(id=data['id'])
    musiclist = []
    for entry in items:
        track = entry.music.diction()
        track["order"] = entry.order
        musiclist.append(track)
    # .scalar() unwraps the single-column result; the original .first()
    # returned a row tuple, so the JSON "name" field was ('name',) rather
    # than the plain name string.
    json_data["name"] = Session.query(playlist.name).filter(
        playlist.id == data['id']).scalar()
    json_data["musiclist"] = musiclist
    return jsonify(json_data)
def put(self):
    """Mark a subtask as finished (status 2) and free its machine (status 0)."""
    db = Session()
    args = parser.parse_args()
    subtask = db.query(SubTask).filter(
        SubTask.major_task_track_number == args["tracknumber"]).first()
    subtask.status = 2
    subtask.running_machine = '999'
    machine = db.query(Machine).filter(Machine.IP == args["machine"]).first()
    machine.status = 0
    db.commit()
    return {"PC and subtask status changed": ''}
async def update_user(user_id: int, User: UserUpdate, db: Session = Depends(get_db),
                      current_user: User = Depends(get_current_active_user)):
    """Update a user's name and email, returning the refreshed record."""
    try:
        target = db.query(UserModel).filter(UserModel.id == user_id)
        if not target.first():
            return failedMessage('Registro no existe')
        target.update({UserModel.name: User.name, UserModel.email: User.email})
        db.commit()
        # Re-read the row so the response reflects the stored values.
        model_dict = model2Dict(target.first())
        res = UserResponse(**model_dict)
    except SQLAlchemyError as e:
        raise Exception(e)
    return res
def usersocial(id):
    """Render another user's social feed page."""
    member = Session.query(user).filter(user.id == id).first()
    post = request.method == 'POST'
    group1 = [dict(name=u"애픽하이")]
    feedlist = Session.query(feed).filter(
        feed.userid == id).order_by(feed.id.desc()).all()
    view_user = dict(name=member.realname, grouplist=group1,
                     profileurl=session['profile'])
    return render_template('social.html', user=view_user,
                           feedlist=feedlist, post=post)
def delete_user(self, user_id):
    """Delete the user row with *user_id* and emit the deletion signal.

    Raises whatever .one() raises when the row is missing or duplicated.
    """
    s = Session()
    try:
        # .one() raises if the row is absent; the finally block prevents the
        # session from leaking in that case (the original never closed it).
        del_user = s.query(User).filter(User.id == user_id).one()
        s.delete(del_user)
        s.commit()
        self.signal_delete_user.emit(user_id)
    finally:
        s.close()
def post(self):
    """Create a note from the request JSON and return it serialized (201)."""
    json_data = request.get_json()
    if not json_data:
        return {'message': 'No input data provided'}, 400
    # Validate and deserialize input.
    try:
        data, errors = note_schema.load(json_data)
        if not data.get('title') or not data.get('text'):
            raise ValidationError('Data not provided')
    except ValidationError as err:
        return err.messages, 422
    db_session = Session()
    # Create new note with a fresh uuid and current timestamps.
    note = Note(
        id=str(uuid.uuid4()),
        title=data.get('title'),
        text=data.get('text'),
        date_create=int(time.time()),
        date_update=int(time.time()),
    )
    db_session.add(note)
    db_session.commit()
    stored = db_session.query(Note).filter(Note.id == note.id).first()
    result, errors = note_schema.dump(stored)
    db_session.close()
    return result, 201
def print_clients():
    """Dump every Client row, ordered by id, to stdout."""
    db = Session()
    for client in db.query(Client).order_by(Client.id):
        print(client)
def GetAdsList(self, request, context):
    """gRPC handler: return every advertisement mapped to its proto form."""
    db = Session()
    rows = db.query(Advertisement).all()
    data = list(map(mappers.advertisement_mapper, rows))
    return car_sale_pb2.AdsListResponse(data=data)
def print_history():
    """Dump every ClientHistory row, ordered by id, to stdout."""
    db = Session()
    for record in db.query(ClientHistory).order_by(ClientHistory.id):
        print(record)
def get_announcements(course, url): '''Gets all new announcements Returns a list of all new announcements. ''' session = Session() try: r = s.get('https://edux.pjwstk.edu.pl/Announcements.aspx', stream=True) r.raise_for_status() new_announcements = extract_announcements(r.content) # All pairs of (timestamp, message) are saved to db # if they arent there already for (timestamp, message) in new_announcements: announcement = session.query(Announcement). \ filter_by(course=course, created_at=timestamp, message=message). \ first() if announcement is None: # This is what we care about announcement = Announcement( course=course, created_at=timestamp, message=message) session.add(announcement) print u'New announcement at {0}'.format(timestamp) yield (timestamp, message) session.commit() except Exception: session.rollback() raise finally: session.close()
def parse_metadata(jobid):
    """Populate a File row's name/extension/type metadata for a job.

    Marks the job 'working' while processing, 'complete' on success and
    'failed' on any error.
    """
    session = Session()
    file_obj, job = session.query(File, Job).filter(Job.id == jobid).first()
    job.status = 'working'
    session.commit()
    try:
        match = re.search(r'(.+)\.([a-z0-9]+)$',
                          file_obj.abs_path.split('/')[-1], re.I)
        file_obj.name = match.group(1)
        file_obj.ext = match.group(2)
        file_obj.file_type = magic.from_file(file_obj.abs_path)
        file_obj.mime_type = magic.from_file(file_obj.abs_path, mime=True)
        # The old code assigned `job.staus` (typo), which is exactly why the
        # status never reached 'complete' even though the File row updated
        # (the TODO in the original complained about this).
        job.status = 'complete'
        print('session.dirty: {0}'.format(session.dirty))
        session.commit()
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # are not swallowed.
        session.rollback()
        job.status = 'failed'
        session.commit()
class LeaderParser(object):
    """
    Retrieves most recently cached rankings for the specified league.
    Provides deferred functions that can be called from other Twisted
    applications.
    """

    def __init__(self):
        """Initialize a database session."""
        log.msg("Initializing Ubernet Leaderboard parser.")
        self.session = Session()

    def top(self, league):
        """Start an update and return a deferred containing the results."""
        updateDeferred = succeed(None)
        resultDeferred = Deferred()
        league = league.capitalize()

        def updateDone(value):
            """Callback method for update."""
            query = (
                self.session.query(LeaderBoardEntry.uid, UberAccount.dname)
                .outerjoin(UberAccount,
                           UberAccount.uid == LeaderBoardEntry.uid)
                .filter(LeaderBoardEntry.game == "Titans",
                        LeaderBoardEntry.league == league)
                .order_by(LeaderBoardEntry.rank)
            )
            # Only the display name (second column) is reported.
            resultDeferred.callback([dname for _, dname in query])
            self.session.close()

        updateDeferred.addCallback(updateDone)
        return resultDeferred
def load(file_path):
    # Load a text (custom DIY XML or Perseus XML) into the database:
    # author row, text row, per-section content and ngrams, then refresh
    # the global ngram counts and the vector space.
    if not file_path.endswith('xml'):
        print('Not an XML file:' + file_path)
        # NOTE(review): `pass` does not abort — non-XML paths still fall
        # through to the parsing below; presumably a `return` was intended.
        pass
    if file_path.endswith('DIY.xml'):
        author, title, sections = parse_xml(open(file_path))
    else:
        author, title, sections = parse_perseus(open(file_path),'div1')
    session = Session()
    a = get_or_create(session, Author, name=author)
    session.commit()
    t = get_or_create(session, Text, name=title, author=a.id)
    session.commit()
    global_ngrams = session.query(GlobalNgrams).first()
    section_count = 1
    log('Loading: ' + t.name)
    if not USE_ORIGINAL_DIVISIONS:
        # Collapse the source's own divisions into one big section...
        sections = [' '.join(sections)]
    if DIVISION_LENGTH:
        # ...and optionally re-split into fixed-length chunks.
        sections = create_chunks(sections[0],DIVISION_LENGTH)
    for sec in sections:
        temp_section = get_or_create(session, Section, source_text=t.id, number=section_count)
        log('Loading section ' + str(section_count))
        session.commit()
        temp_section_content = get_or_create(session, SectionContent, section = temp_section.id, content = sec)
        log('Creating ngrams of ' + str(section_count))
        temp_section_ngrams = get_or_create(session, SectionNgrams, section = temp_section.id, ngrams = generate_ngrams(temp_section_content.content))
        log('Updating global ngram counts.')
        update_global_counts(session, global_ngrams,temp_section_ngrams.ngrams)
        section_count = section_count + 1
        session.commit()
    update_vector_space(session, global_ngrams)
def del_user_controller(request):
    """Delete every User row matching the request's 'id' and confirm."""
    user_id = int(request.get('id'))
    db = Session()
    matches = db.query(User).filter_by(id=user_id)
    for row in matches:
        db.delete(row)
    db.commit()
    return make_response(request, 200, f'User id {user_id} deleted')
def on_return(self, task):
    """Called by main thread on the return of data from the workers. Post-processing"""
    logger.info('Retrieved task %s', task.tag)
    # The task tag carries the database id of the trajectory it computed.
    traj = Session.query(models.Trajectory).get(int(task.tag))
    try:
        # save lh5 version of the trajectory
        conf = load_file(self.project.pdb_topology_file)
        coordinates = msmbuilder.Trajectory.load_trajectory_file(str(traj.dry_xtc_fn), Conf=conf)
        save_file(traj.lh5_fn, coordinates)
    except Exception as e:
        logger.error('When postprocessing %s, convert to lh5 failed!', traj)
        logger.exception(e)
        raise
    # convert last_wet_snapshot to lh5
    pdb_to_lh5(traj, 'last_wet_snapshot_fn')
    pdb_to_lh5(traj, 'init_pdb_fn')
    # Record provenance and size on the trajectory row; the caller is
    # presumably responsible for committing — TODO confirm.
    traj.host = task.host
    traj.returned_time = datetime.now()
    traj.length = len(coordinates)
    logger.info('Finished converting new traj to lh5 sucessfully')
def update_listing(id: int, title: str = Form(...), context: str = Form(...),
                   user=Depends(manager),
                   db: database.Session = Depends(database.get_db)):
    """Update a listing's title/context; only its author may edit it.

    Raises 404 (missing listing), 403 (field too long), 401 (not author).
    """
    listing = db.query(database.Listing).filter_by(id=id).first()
    # Guard clauses with `is None` / `!=` replace the original `== None`
    # comparison and deep elif chain.
    if listing is None:
        raise HTTPException(status.HTTP_404_NOT_FOUND,
                            detail='Requested listing not found')
    if len(title) >= 100:
        raise HTTPException(status.HTTP_403_FORBIDDEN, detail='Title too long')
    if len(context) >= 1000:
        raise HTTPException(status.HTTP_403_FORBIDDEN, detail='Context too long')
    if listing.author != user.username:
        raise HTTPException(status.HTTP_401_UNAUTHORIZED,
                            detail='Not authorized to update this listing')
    listing.title = title
    listing.context = context
    db.commit()
    return models.Listing(post_id=listing.id, title=listing.title,
                          author=listing.author, context=listing.context)
def delete(self):
    """Delete the note whose id is supplied in the request JSON."""
    db_session = Session()
    json_data = request.get_json()
    if not json_data:
        return {'message': 'No input data provided'}, 400
    # Validate the payload shape.
    try:
        data, errors = note_schema.load(json_data)
        if not data.get('id'):
            raise ValidationError('Data not provided')
    except ValidationError as err:
        db_session.close()
        return err.messages, 422
    # Locate the note; missing id is a 404.
    try:
        note = db_session.query(Note).filter(
            Note.id == data.get('id')).first()
        if note is None:
            raise ValidationError('Note is not found')
    except ValidationError as err:
        db_session.close()
        return err.messages, 404
    db_session.delete(note)
    db_session.commit()
    db_session.close()
    return {"result": "Note successfully deleted"}
def register_view(self):
    """Handle the registration form: create the user on valid POST,
    otherwise render the form with role choices and a login link."""
    form = RegistrationForm(request.form)
    # Populate role choices BEFORE validation: WTForms select fields
    # validate submitted values against `choices`, so leaving them unset
    # until after form.validate() broke role validation on POST.
    form.role.choices = [(role.key, role.name)
                         for role in Session.query(Role).all()]
    if request.method == 'POST' and form.validate():
        user = Users()
        user.username = form.username.data
        user.email = form.email.data
        user.roles = ','.join(form.role.data)
        # we hash the users password to avoid saving it as plaintext in the db,
        # remove to use plain text:
        user.password = generate_password_hash(form.password.data)
        Session.add(user)
        Session.commit()
        login.login_user(user)
        flash('Thanks for registering')
        return redirect(url_for('.index'))
    link = '<p>Already have an account? <a href="' + url_for('.login_view') + '">Click here to log in.</a></p>'
    self._template_args['form'] = form
    self._template_args['link'] = link
    return super(MyAdminIndexView, self).index()
def main():
    """Print the text of the reading given by the first CLI argument."""
    reading_id = int(sys.argv[1])
    session = Session()
    reading = session.query(Reading).filter(Reading.id == reading_id).one()
    quoted = '"' + reading.text.strip() + '"'
    PrependStream(sys.stdout, ' ').write(quoted)
    sys.stdout.write('\n')
    # Read-only script: discard the session state.
    session.rollback()
def for_name(cls, name, create=False):
    """Look up an event row by name, optionally creating it when absent."""
    try:
        return Session.query(cls).filter_by(name=name).one()
    except NoResultFound:
        if not create:
            raise EventNotFound(name)
        return returnit(Session.add, cls(name))
def login_check(user_name): session = Session() user = session.query(User).filter(or_( User.nickname == user_name)).first() print session session.commit() if not user: return None return user
def attributes(self):
    """Distinct attribute keys recorded for this event, ordered by key."""
    rows = (Session.query(Attribute.key)
            .join(Attribute.entry)
            .filter_by(event_id=self.id)
            .order_by(Attribute.key)
            .distinct())
    # Each row is a one-column tuple; unpack to the bare key.
    return [key for (key,) in rows]
def index(self, username=None):
    """Render an HTML table of users (all, or filtered by username substring).

    NOTE(review): field values are interpolated into HTML unescaped and
    passwords are displayed in clear text — worth fixing upstream.
    """
    tableText = '<div class="container">\n\
 <table class="table">\n\
 <thead>\n\
 <tr>\n\
 <th>Name</th>\n\
 <th>Username</th>\n\
 <th>Password</th>\n\
 </tr>\n\
 </thead>\n\
 <tbody>\n'
    session = Session()
    query = session.query(User)
    # `is None` is the correct identity test (original used `== None`).
    if username is not None:
        query = query.filter(User.username.like("%" + username + "%"))
    # One shared rendering loop replaces the two duplicated branches.
    for user in query.order_by(User.id):
        tableText += (
            "<tr>\n<td>" + user.fullname + "</td>\n"
            + "<td>" + user.username + "</td>\n"
            + "<td>" + user.password + "</td>\n</tr>\n"
        )
    tableText += "</tbody>\n</table>\n"
    session.close()
    return top + tableText + bottom
def main():
    """Render the album overview page for logged-in users."""
    if not session.get('logged_in'):
        return redirect(url_for('index'))
    post = request.method == 'POST'
    albums = Session.query(album).order_by(album.id).all()
    return render_template('album_view.html', albumlist=albums, post=post)
def musicstream ():
    # Create a uniquely named symlink to the requested track's .flv file,
    # bump the track's play count, post a "listened" feed entry, and
    # return the symlink name as JSON ("" when the file is missing).
    data = request.get_json()
    music1 = Session.query(music).filter(music.id==data['id']).one()
    origin = os.getcwd() + "/WebMusicPlayer/static/music/" + music1.filename[:-4] +".flv"
    #time hash : hashlib.md5(str(datetime.today())).hexdigest()
    # Per-user, per-request unique name so concurrent streams don't collide.
    flvfile = unicode(session['userid']) +"_"+hashlib.md5(str(datetime.today())).hexdigest() + ".flv"
    link = "/tmp/flvs/" + flvfile
    if os.path.isfile(origin):
        #subprocess.call(["rm /tmp/flvs/"+ unicode(session['userid']) +"_*.flv"],shell=True)
        subprocess.call(["ln","-s",origin,link])
        json_data = dict (flv = flvfile)
        Session.query(music).filter(music.id==data['id']).update({"count": music1.count+1})
        Session.add(feed(title=session['realname'],text =(session['realname'] + u"님이 "+music1.name+u"을 들었습니다."), userid=session['userid']))
        Session.commit()
    else :
        json_data = dict (flv = "")
        print "File Not Found : " + origin
    return jsonify(json_data)
def add_forcefields_to_db(self, p): if Session.query(models.Forcefield).count() == 0: # add forcefields for ff in p: obj = models.Forcefield(**ff) obj.driver = os.path.join(self.params_dir, ff['driver']) Session.add(obj) Session.commit() else: print "NOTE: I'M NOT PARSING NEW FORCEFIELDS"
def social():
    """Render the logged-in user's own social feed page."""
    if not session.get('logged_in'):
        return redirect(url_for('index'))
    post = request.method == 'POST'
    viewer = dict(name=session['realname'], profileurl=session['profile'])
    feedlist = Session.query(feed).filter(
        feed.userid == session['userid']).order_by(feed.id.desc()).all()
    return render_template('social.html', user=viewer,
                           feedlist=feedlist, post=post)
def most_frequent_duos(frequent_words):
    """Return pairs of frequent words co-occurring in more than 10 posts."""
    from datamodel import Link
    from database import Session
    db = Session()
    duos = {}
    for link in db.query(Link):
        tokens = mash_post(link)
        # Only words from the frequent set can form a counted pair.
        interesting = [w for w in tokens if w in frequent_words]
        for first in interesting:
            for second in interesting:
                if first != second:
                    dic_add(duos, frozenset((first, second)))
    return [list(pair) for pair, count in duos.items() if count > 10]
def is_sufficient_new_data():
    """Is there sufficient new data to build a new round?

    Returns
    -------
    truth : boolean
        True if there is sufficient new data for a new round
    """
    group_query = Session.query(MSMGroup)
    traj_query = Session.query(Trajectory)
    latest_group = group_query.order_by(MSMGroup.id.desc()).first()
    n_built = 0
    if latest_group is not None:
        n_built = traj_query.filter(
            Trajectory.msm_groups.contains(latest_group)).count()
    n_total = traj_query.filter(Trajectory.returned_time != None).count()
    truth = n_total >= n_built + Project().num_trajs_sufficient_for_round
    logger.info("%d trajs total, %d trajs built. Sufficient? %s",
                n_total, n_built, truth)
    return truth
def list():
    """Render the playlist overview for the logged-in user."""
    if not session.get('logged_in'):
        return redirect(url_for('index'))
    post = request.method == 'POST'
    user_lists = Session.query(playlist).filter(
        playlist.userid == session['userid']).all()
    return render_template('list_view.html', playlist=user_lists, post=post)
class Factory(object):
    """Thin repository wrapper around a Session for one model class."""

    def __init__(self, model):
        self.session = Session()
        self.model = model

    def fetch_all(self):
        """Return every row of the wrapped model."""
        query = self.session.query(self.model)
        return query.all()

    def insert(self, obj, commit=True):
        """Stage *obj* for insertion, committing immediately unless told not to."""
        self.session.add(obj)
        if not commit:
            return
        self.session.commit()

    def commit(self):
        """Flush pending changes to the database."""
        self.session.commit()
def users(user_id):
    """Render a user's profile page with their posts."""
    form = AboutMeForm()
    session = Session()
    user = session.query(User).filter(User.id == user_id).first()
    if not user:
        flash("The user is not exist.")
        # The original called redirect() without returning it, so execution
        # fell through and crashed on `user.posts` with user == None.
        return redirect("/index")
    blogs = user.posts
    return render_template(
        "user2.html",
        form=form,
        user=user,
        blogs=blogs)
def get_folders(course):
    '''Gets all folders

    Creates a Folder row for each folder not yet stored and updates rows
    whose title/start/finish changed, sending a notification either way.
    '''
    session = Session()
    try:
        r = s.get('https://edux.pjwstk.edu.pl/Folder.aspx')
        r.raise_for_status()
        new_folders = extract_folders(r.content)
        for (folder_id, title, start_at, finish_at) in new_folders:
            folder = session.query(Folder). \
                filter_by(folder_id=folder_id). \
                first()
            if folder is None:
                # First time we see this folder: store it and notify.
                folder = Folder(
                    folder_id=folder_id,
                    course=course,
                    title=title,
                    start_at=start_at,
                    finish_at=finish_at)
                send_notify('New folder "{}" at {}'.format(title, course.title),
                            '''Folder title: {0.title}
Start at: {0.start_at}
Finish at: {0.finish_at}'''.format(folder))
                session.add(folder)
            # For a just-created folder the fields already match, so this
            # update branch only fires for pre-existing rows that changed.
            if (folder.title != title or folder.start_at != start_at or folder.finish_at != finish_at):
                new = {
                    'title': title,
                    'start_at': start_at,
                    'finish_at': finish_at
                }
                send_notify('Folder "{0}" updated'.format(title),
                            '''Folder title: {new[title]} (old: {0.title})
Start at: {new[start_at]} (old: {0.start_at})
Finish at: {new[finish_at]} (old: {0.finish_at})'''.format(folder, new=new))
                folder.title = title
                folder.start_at = start_at
                folder.finish_at = finish_at
                session.add(folder)
        session.commit()
    finally:
        session.close()
def login ():
    # Authenticate the POSTed username/password (MD5 digest compared to
    # the stored hash); on success populate the Flask session and go to
    # 'main', otherwise print the error and bounce back to the login page.
    loginuser = Session.query(user).filter(user.name == request.form['username']).all()
    print loginuser
    if not loginuser :
        error = 'Invalid username'
    elif hashlib.md5(request.form['password']).hexdigest() != loginuser[0].pw :
        # NOTE(review): unsalted MD5 comparison -- weak password hashing.
        error = 'Invalid password'
    else:
        session['logged_in'] = True
        session['userid'] = loginuser[0].id
        session['username'] = loginuser[0].name
        session['realname'] = loginuser[0].realname
        session['profile'] = loginuser[0].profileurl
        #flash('You were logged in')
        return redirect(url_for('main'))
    print error
    return redirect(url_for('index'))
def joint_clustering():
    """Jointly cluster the data from all of the forcefields.

    Returns
    -------
    generators : msmbuilder.Trajectory
        The cluster generators as a single trajectory.
    db_trajs : list
        The database Trajectory rows that were clustered.

    Raises
    ------
    RuntimeError
        If the database contains no returned trajectories yet.
    """
    logger.info('Running joint clustering')
    # load up all the trajs in the database
    db_trajs = Session.query(Trajectory).filter(Trajectory.returned_time != None).all()
    if len(db_trajs) == 0:
        raise RuntimeError()
    # load the xyz coordinates from disk for each trajectory,
    # subsampling frames by the project-wide stride
    load = lambda v: msmbuilder.Trajectory.load_trajectory_file(v)
    loaded_trjs = [load(t.lh5_fn)[::Project().stride] for t in db_trajs]
    clusterer = Project().clusterer(trajectories=loaded_trjs)
    return clusterer.get_generators_as_traj(), db_trajs
def most_frequent_words():
    """Return all 'interesting' words

    A word is interesting when it appears at least 4 times and either at
    most 20 times or starts with 'http'; at most the 800 most frequent
    such words are returned.
    """
    from datamodel import Link
    from database import Session
    s = Session()
    links = s.query(Link)
    frequent = {}
    for l in links:
        #words=tokenize(l.title)
        words = mash_post(l)
        for w in words:
            # dict.get replaces the deprecated/removed dict.has_key.
            frequent[w] = frequent.get(w, 0) + 1
    fwords = [(f, w) for w, f in frequent.items()
              if f >= 4 and (f <= 20 or w[:4] == "http") and len(w) > 1]
    fwords.sort(reverse=True)
    maxlength = 800
    if len(fwords) > maxlength:
        fwords = fwords[:maxlength]
    return [w for f, w in fwords]
def main():
    # Interactively edit a reading's text in an external editor, with the
    # corresponding bible passage included as commented reference text,
    # then save the edited text only on user confirmation.
    reading_id = int(sys.argv[1])
    session = Session()
    bible_query = BibleQuery()
    reading = session.query(Reading).filter(Reading.id == reading_id).one()
    text = reading.text if reading.text is not None else ""
    editor = Editor()
    # Fix wrong quotation marks
    text = re.sub(ur'"([a-zA-ZàòùèéìÒÀÙÈÉÌ0-9])', ur'“\1', text, count=0)
    text = re.sub(ur'([a-zA-ZàòùèéìÒÀÙÈÉÌ0-9\.?!])"', ur'\1”', text, count=0)
    # From http://stackoverflow.com/questions/15120346/emacs-setting-comment-character-by-file-extension
    PrependStream(editor.tempfile, '# ').write(u'-*- coding: utf-8; comment-start: "#"; -*-\n')
    PrependStream(editor.tempfile, '# ').write(u'Quote: %s\n' % (reading.quote))
    editor.tempfile.write(u'\n')
    editor.tempfile.write(text)
    editor.tempfile.write(u'\n')
    PrependStream(editor.tempfile, '# ').write(u'Useful characters: “”–\n\n')
    try:
        converted_quote = convert_quote_psalm_numbering(reading.quote, False)
        bible_text = bible_query.get_text(decode_quote(converted_quote, allow_only_chap=True))
    except:
        PrependStream(editor.tempfile, '# ').write(u'Quote: %s\nCould not retrieve bible text\n' % (reading.quote))
        print decode_quote(reading.quote, allow_only_chap=True)
        raise
    else:
        bible_text = "\n".join(map(lambda x: x.strip(), bible_text.split('\n')))
        PrependStream(editor.tempfile, '# ').write(u'Quote: %s\nConverted quote: %s\nBible text:\n\n%s' % (reading.quote, converted_quote, bible_text))
    editor.edit()
    # Lines starting with '#' are reference/instructions, not content.
    new_text = u''.join(filter(lambda x: not x.startswith(u'#'), editor.edited_content)).strip() + u'\n'
    if editor.confirmation_request(new_text != reading.text):
        reading.text = new_text
        session.commit()
    else:
        session.rollback()
def main():
    # Print, as '# '-commented text plus machine-readable JSON lines, the
    # reading quotes of one mass (by id) or of every mass on a liturgical
    # date (year month day).
    session = Session()
    if len(sys.argv[1:]) == 1:
        mass_id = int(sys.argv[1])
        masses = [session.query(Mass).filter(Mass.id == mass_id).one()]
    elif len(sys.argv[1:]) == 3:
        year, month, day = map(int, sys.argv[1:])
        lit_years = {}
        lit_date = get_lit_date(datetime.date(year, month, day), lit_years, session)
        masses = lit_date.get_masses(strict=True)
    else:
        print >> sys.stderr, "Wrong number of arguments"
        sys.exit(1)
    # Human-readable lines carry a '# ' prefix so that only the JSON
    # "citazioni" lines below are meant for parsing.
    fout = PrependStream(sys.stdout, '# ')
    for mass in sorted(masses, key=lambda x: x.order):
        num_reading = max(map(lambda x: x.order, mass.readings)) + 1
        quotes = []
        alt_quotes = []
        print >> fout, "Mass #%d (%s) in event %s - ID: %d" % (mass.order, mass.title, mass.event.title, mass.id)
        for reading in sorted(mass.readings, key=lambda x: (x.order, x.alt_num)):
            print >> fout, " Lettura #%d.%d (%s): %s - ID: %d" % (reading.order, reading.alt_num, reading.title, reading.quote, reading.id)
        for i in xrange(num_reading):
            # Exactly one main (alt_num == 0) reading is expected per slot.
            [reading] = filter(lambda x: x.order == i and x.alt_num == 0, mass.readings)
            if reading.only_on_sunday:
                # Sunday-only readings become alternatives of slot 0.
                alt_quotes[0].append(reading.quote)
                continue
            quotes.append(reading.quote)
            alt_quotes.append(map(lambda x: x.quote, sorted(filter(lambda x: x.order == i and x.alt_num > 0, mass.readings), key=lambda x: x.alt_num)))
        sys.stdout.write("citazioni: %s\n" % (json.dumps(quotes)))
        sys.stdout.write("citazioni_alt: %s\n" % (json.dumps(alt_quotes)))
    session.rollback()