def delete_author(id):
    """Remove an author and every author-book link that references it."""
    # Delete link rows first so no orphaned associations remain.
    link_rows = Session.query(AuthorBookLink).filter(
        AuthorBookLink.author_id == id)
    link_rows.delete()
    author_rows = Session.query(Author).filter(Author.id == id)
    author_rows.delete()
    Session.commit()
    return 'OK', 200
def update_book(id):
    """Update a book's id/title and rebuild its author links.

    Expects JSON: {'id', 'title', 'authors_id': [...]}.
    Returns 400 on missing body, unknown book, duplicate id, or unknown author.
    """
    book = Session.query(Book).filter(Book.id == id)
    if not request.json or not book.first():
        abort(400)
    # Existing links are keyed by the *current* book id (the path parameter),
    # not the possibly-new id supplied in the payload (old code filtered on
    # request.json['id'] and so deleted nothing when the id changed).
    Session.query(AuthorBookLink).filter(
        AuthorBookLink.book_id == id).delete()
    authors = request.json['authors_id']
    try:
        book.update({
            'id': request.json['id'],
            'title': request.json['title'],
            'updated_at': datetime.now()
        })
    except IntegrityError:
        Session.rollback()
        return 'Id currently exists', 400
    for author_id in authors:
        # .first() is None for a missing author; the Query object itself is
        # always truthy, so the old `if not author:` branch never fired.
        author = Session.query(Author).filter(
            Author.id == author_id).first()
        if author is None:
            Session.rollback()
            # Explicit 400 status; a bare string body would be sent as 200 OK.
            return 'No author with id %i' % author_id, 400
        Session.add(
            AuthorBookLink(author_id=author_id, book_id=request.json['id']))
    Session.commit()
    return 'OK', 200
def update_author(id):
    """Rename/re-id an author while preserving its links to books."""
    author = Session.query(Author).filter(Author.id == id)
    if not request.json or not author.first():
        abort(400)
    # Remember which books were linked, then drop the links so they can be
    # recreated against the (possibly new) author id.
    links = Session.query(AuthorBookLink).filter(
        AuthorBookLink.author_id == author.first().id)
    connected_books_ids = [link.book_id for link in links]
    links.delete()
    try:
        author.update({
            'id': request.json['id'],
            'name': request.json['name'],
            'updated_at': datetime.now(),
        })
    except IntegrityError:
        Session.rollback()
        return 'Id already exists', 400
    for book_id in connected_books_ids:
        Session.add(
            AuthorBookLink(author_id=request.json['id'], book_id=book_id))
    Session.commit()
    return 'OK', 200
def post(self):
    """Create a new note from the request JSON and return it serialized."""
    payload = request.get_json()
    if not payload:
        return {'message': 'No input data provided'}, 400
    # Validate and deserialize the input; title and text are mandatory.
    try:
        data, errors = note_schema.load(payload)
        if not data.get('title') or not data.get('text'):
            raise ValidationError('Data not provided')
    except ValidationError as err:
        return err.messages, 422
    db_session = Session()
    note = Note(id=str(uuid.uuid4()),
                title=data.get('title'),
                text=data.get('text'),
                date_create=int(time.time()),
                date_update=int(time.time()))
    db_session.add(note)
    db_session.commit()
    # Re-read the persisted row so the response reflects stored state.
    stored = db_session.query(Note).filter(Note.id == note.id).first()
    result, errors = note_schema.dump(stored)
    db_session.close()
    return result, 201
def update_password(current_password=Form(...),
                    new_password=Form(...),
                    user=Depends(manager),
                    db: database.Session = Depends(database.get_db)):
    """Change the authenticated user's password after validating the new one.

    Rejects passwords shorter than 8 or longer than 40 characters, weak
    passwords, and a wrong current password.
    """
    if not user:
        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED,
                            detail='Not authenticated')
    # Length checks aligned with the user-facing messages: the old code
    # accepted 7-char passwords while telling users 8+, and rejected a
    # password of exactly 40 while claiming 40 was the maximum.
    if len(new_password) < 8:
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN,
                            detail='Password must be 8 characters or more')
    if len(new_password) > 40:
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail='Password is too long. Maximum length: 40 characters')
    if PasswordStats(new_password).strength() <= 0.350:
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail='Password is not strong enough. '
                   'Try adding some symbols or numbers to your password')
    db_user = db.query(
        database.User).filter_by(username=user.username).first()
    if not database.Hash.verify_password(current_password, db_user.password):
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN,
                            detail='Current password is incorrect')
    db_user.password = database.Hash.get_password_hash(new_password)
    db.commit()
    db.refresh(db_user)
    return {'detail': 'Password changed'}  # fixed "Passwored" typo
def sign_up():
    """Handle the sign-up form: validate, reject duplicate names, persist."""
    form = SignUpForm()
    user = User()
    if form.validate_on_submit():
        user_name = request.form.get('user_name')
        user_email = request.form.get('user_email')
        if User.login_check(user_name):
            flash("error: The user's name already exists!")
            return redirect('/sign-up')
        if len(user_name) and len(user_email):
            user.nickname = user_name
            user.email = user_email
            # Session created before the try so the except clause can
            # always roll it back safely.
            session = Session()
            try:
                session.add(user)
                session.commit()
            except Exception:
                # Narrowed from a bare `except:` which would also swallow
                # KeyboardInterrupt/SystemExit; roll back the failed insert.
                session.rollback()
                flash("The Database error!")
                return redirect('/sign-up')
            flash("Sign up successful!")
            return redirect('/index')
    return render_template("sign_up.html", form=form)
def process_program(self, id: int) -> Optional[str]:
    """Recompute a program's weighted rating and clear it from the planned set.

    Returns a human-readable summary string, or None when the program id
    is unknown.
    """
    session = Session()
    program = session.query(Program).filter(Program.id == id).one_or_none()
    if not program:
        return None
    state_db = Mongo(STATE_COLL)
    # Weighted sum of the program's parameters.
    rating = 0
    for param in program.parameters:
        rating += param['weight'] * int(
            param['value'])  # TODO: type processing
    program.rating = int(rating)  # round score
    session.add(program)
    session.commit()
    # Drop this id from the shared "planned" list so it isn't processed twice.
    state_db.update_one({'key': PROCESS_KEY}, {'$pull': {'planned': id}})
    # (removed unused `stats_db = Mongo(STATS_COLL)` local)
    return f'New rating for program <{id}>: {rating}'
def delete(self):
    """Delete the note whose id is supplied in the request JSON."""
    db_session = Session()
    payload = request.get_json()
    if not payload:
        return {'message': 'No input data provided'}, 400
    # The id field is mandatory.
    try:
        data, errors = note_schema.load(payload)
        if not data.get('id'):
            raise ValidationError('Data not provided')
    except ValidationError as err:
        db_session.close()
        return err.messages, 422
    # Look the note up; a missing note is reported as 404.
    try:
        target = db_session.query(Note).filter(
            Note.id == data.get('id')).first()
        if target is None:
            raise ValidationError('Note is not found')
    except ValidationError as err:
        db_session.close()
        return err.messages, 404
    db_session.delete(target)
    db_session.commit()
    db_session.close()
    return {"result": "Note successfully deleted"}
async def check_queue():
    """Publish and pin every queue whose scheduled pin time has passed."""
    session = Session()
    # `== False` (not `is False`): SQLAlchemy needs the overloaded operator
    # to build the SQL predicate.
    due_queues = session.query(Queue).filter(
        Queue.is_pinned == False,
        Queue.pin_date < datetime.now()).all()
    for queue in due_queues:
        try:
            chat = get_chat(session, queue.chat_id)
            message = await bot.send_message(queue.chat_id,
                                             f"{queue.title}\n\nLine:",
                                             reply_markup=get_keyboard(queue))
            # Pinning may fail when the bot lacks admin rights in the chat.
            try:
                if chat.pin:
                    await bot.pin_chat_message(queue.chat_id,
                                               message.message_id)
            except Exception as e:
                await bot.send_message(queue.chat_id,
                                       "Not enough rights for the pin")
            queue.is_pinned = True
            queue.message_id = message.message_id
            session.commit()
        except Exception as e:
            print(e)
def update_stock_master():
    """종목 DB 갱신 -- add any newly listed KRX stocks to the master table."""
    # Create the session *outside* the try block: if Session() itself failed,
    # the finally clause would otherwise raise NameError on `session`.
    session = Session()
    try:
        start_time = time.time()
        known_codes = [
            stock[0] for stock in session.query(종목_마스터.거래소코드).all()
        ]
        affected_rows = 0
        for stock in krx.get_stock_list():
            # Only ordinary shares (short code ending in '0') not yet stored.
            if stock['short_code'].endswith(
                    '0') and stock['full_code'] not in known_codes:
                session.add(
                    종목_마스터(stock['marketName'], stock['short_code'][1:],
                           stock['codeName'], stock['full_code']))
                affected_rows += 1
        if affected_rows > 0:
            session.commit()
            slack.send_message(
                'BATCH:update_stock_master success {}건 업데이트'.format(
                    affected_rows))
        execution_time = time.time() - start_time
        logger.info('execution_time: {}'.format(execution_time))
        logger.info('{} rows added'.format(affected_rows))
    except Exception as e:
        logger.exception(e)
        slack.send_message('BATCH:update_stock_master fail {}'.format(e))
    finally:
        session.close()
def fit_process(uid):
    """Fit a classifier for *uid* in a worker process and persist the model.

    Uses a dedicated scoped session and a temporary file for the serialized
    model; the temp file is always removed, even when fitting fails.
    """
    # Temporary file to hold the serialized model; only the path is needed,
    # so close the descriptor right away.
    fd, path = tempfile.mkstemp()
    os.close(fd)
    # Give this process a dedicated session.
    session = Session()
    try:
        ftclassifier.fit(session, uid, path)
        # sgdclassifier.fit(session, uid, path)
        # Persist the trained model bytes into the database.
        with open(path, 'rb') as f:
            classifier = f.read()
        dal.update_classifier(session, uid, classifier)
        session.commit()
    except:
        session.rollback()
        raise
    finally:
        session.close()
        Session.remove()
        # Deleting in `finally` fixes a leak: the old code unlinked *after*
        # the try statement, which was never reached when fit() raised.
        os.unlink(path)
def del_user_controller(request):
    """Delete every User row matching the request's id and confirm."""
    user_id = int(request.get('id'))
    session = Session()
    matches = session.query(User).filter_by(id=user_id)
    for row in matches:
        session.delete(row)
    session.commit()
    return make_response(request, 200, f'User id {user_id} deleted')
def add_user_controller(request):
    """Persist a new User built from the request payload."""
    session = Session()
    new_user = User(name=request.get('name'),
                    password=request.get('password'))
    session.add(new_user)
    session.commit()
    return make_response(request, 200, f'User {new_user.name} created')
def delete_user(self, user_id):
    """Remove the user with *user_id* and emit the deletion signal."""
    session = Session()
    # .one() raises if the user does not exist -- same contract as before.
    doomed = session.query(User).filter(User.id == user_id).one()
    session.delete(doomed)
    session.commit()
    self.signal_delete_user.emit(user_id)
    session.close()
async def callback_del_handler(callback: CallbackQuery):
    """Handle the 'leave queue' inline button: remove the caller's record
    and re-render the queue message."""
    queue_id = int(callback.data.split("-")[1])
    session = Session()
    record = session.query(QueueRecord).filter(
        QueueRecord.queue_id == queue_id,
        QueueRecord.user_id == callback.from_user.id).first()
    if not record:
        session.close()
        await bot.answer_callback_query(callback.id, "You are not in the list")
        return
    # NOTE(review): return value unused -- presumably get_user() lazily
    # creates the user row as a side effect; confirm before removing.
    user = get_user(session, callback.from_user)
    queue = session.query(Queue).filter(Queue.id == queue_id).first()
    record.remove_record()
    session.commit()
    # Rebuild the queue message text from the remaining records.
    text = f"{queue.title}\n\nLine:"
    for record in session.query(QueueRecord).filter(
            QueueRecord.queue_id == queue_id).all():
        text += f"\n{record.position}. {record.user.user_name}"
    await bot.answer_callback_query(callback.id, "Left the line")
    await bot.edit_message_text(text,
                                queue.chat_id,
                                queue.message_id,
                                reply_markup=get_keyboard(queue))
    session.close()
async def create_handler(message: Message):
    """Create a queue from '/create [title]', scheduled chat.default_time
    minutes from now, rounded down to the whole minute."""
    title = message.text[8:]
    if title.replace(" ", "") == "":
        await message.reply("Empty title. Use /create [text]\n", reply=False)
        return
    session = Session()
    pin_time = datetime.now()
    chat = get_chat(session, message.chat.id)
    pin_time += timedelta(minutes=chat.default_time)
    # Round down to the whole minute.  The old code passed time.second as
    # *days* (timedelta's first positional argument), shifting the pin date
    # back by whole days instead of trimming seconds.
    pin_time -= timedelta(seconds=pin_time.second)
    queue = Queue(creator_id=message.from_user.id,
                  message_id=message.message_id,
                  pin_date=pin_time,
                  title=title,
                  chat_id=message.chat.id)
    session.add(queue)
    session.commit()
    session.close()
    await message.reply(
        f"{title}\n\nPublication time: {pin_time.strftime('%H:%M, %d.%m.%Y')}",
        reply=False)
def on_return(self, task):
    """Called by main thread on the return of data from the workers.
    Post-processing: converts the returned trajectory files to lh5 and
    records provenance (host, return time, length) on the DB row."""
    logger.info("Retrieved task %s", task.tag)
    # The work-queue task tag is the Trajectory's primary key.
    traj = Session.query(models.Trajectory).get(int(task.tag))
    try:
        # save lh5 version of the trajectory
        conf = load_file(self.project.pdb_topology_file)
        coordinates = msmbuilder.Trajectory.load_trajectory_file(
            str(traj.dry_xtc_fn), Conf=conf)
        save_file(traj.lh5_fn, coordinates)
    except Exception as e:
        logger.error("When postprocessing %s, convert to lh5 failed!", traj)
        logger.exception(e)
        raise
    # convert last_wet_snapshot to lh5
    pdb_to_lh5(traj, "last_wet_snapshot_fn")
    pdb_to_lh5(traj, "init_pdb_fn")
    # Record where the task ran, when it came back, and how long the
    # trajectory is.
    traj.host = task.host
    traj.returned_time = datetime.now()
    traj.length = len(coordinates)
    Session.flush()
    Session.commit()
    logger.info("Finished converting new traj to lh5 sucessfully")
def populate():
    """ Populates the database for the given user with sample data. """
    # Authenticate the caller from the posted Firebase id_token.
    try:
        uid = verify_id_token(request.form['id_token'])
    except KeyError:
        return "id_token required", status.HTTP_400_BAD_REQUEST
    except ValueError:
        return "id_token unrecognized", status.HTTP_400_BAD_REQUEST
    except auth.AuthError as exc:
        if exc.code == 'ID_TOKEN_REVOKED':
            return "id_token revoked", status.HTTP_400_BAD_REQUEST
        return "id_token invalid", status.HTTP_400_BAD_REQUEST
    session = Session()
    try:
        dal.populate(session, uid)
        session.commit()
    except:
        session.rollback()
        raise
    finally:
        session.close()
        Session.remove()
    # Retrain the classifier on the freshly inserted sample data.
    classifier.fit(uid)
    return "Sample data added for user", status.HTTP_202_ACCEPTED
def toggle_task(update: Update, context: CallbackContext) -> None:
    """Flip a user task's completed flag from its inline-keyboard button
    and refresh the keyboard markup in place."""
    user_id = update.callback_query.from_user.id
    callback_query = update.callback_query
    message_keyboard_markup = callback_query.message.reply_markup.inline_keyboard
    event, task_id = callback_query.data.split(":")
    # Find the keyboard row whose first button matches the pressed button.
    for i in range(len(message_keyboard_markup)):
        if message_keyboard_markup[i][0].callback_data == callback_query.data:
            session = Session()
            user_task = session.query(UserTask)\
                .filter(UserTask.id == task_id)\
                .filter(UserTask.tg_user_id == user_id)\
                .first()
            if user_task is not None:
                user_task.completed = not user_task.completed
                # Re-render the button label to reflect the new state.
                message_keyboard_markup[i][0] = \
                    InlineKeyboardButton(format.task(user_task),
                                         callback_data=f"UserTask_toggle_click:{user_task.id}")
                session.commit()
            else:
                # Task no longer exists: drop its row from the keyboard.
                del message_keyboard_markup[i]
            session.close()
            break
    context.bot.edit_message_reply_markup(
        chat_id=update.effective_chat.id,
        message_id=update.callback_query.message.message_id,
        reply_markup=InlineKeyboardMarkup(message_keyboard_markup))
    update.callback_query.answer()
def stats():
    """ Get a list of all the given user's stats. """
    # Authenticate the caller from the posted Firebase id_token.
    try:
        uid = verify_id_token(request.form['id_token'])
    except KeyError:
        return "id_token required", status.HTTP_400_BAD_REQUEST
    except ValueError:
        return "id_token unrecognized", status.HTTP_400_BAD_REQUEST
    except auth.AuthError as exc:
        if exc.code == 'ID_TOKEN_REVOKED':
            return "id_token revoked", status.HTTP_400_BAD_REQUEST
        return "id_token invalid", status.HTTP_400_BAD_REQUEST
    session = Session()
    try:
        user_stats = dal.get_stats(session, uid)
        session.commit()
    except:
        session.rollback()
        raise
    finally:
        session.close()
        Session.remove()
    return json.dumps(user_stats), status.HTTP_200_OK
def reset():
    """ Deletes all of the user's data from the database. """
    # Authenticate the caller from the posted Firebase id_token.
    try:
        uid = verify_id_token(request.form['id_token'])
    except KeyError:
        return "id_token required", status.HTTP_400_BAD_REQUEST
    except ValueError:
        return "id_token unrecognized", status.HTTP_400_BAD_REQUEST
    except auth.AuthError as exc:
        if exc.code == 'ID_TOKEN_REVOKED':
            return "id_token revoked", status.HTTP_400_BAD_REQUEST
        return "id_token invalid", status.HTTP_400_BAD_REQUEST
    session = Session()
    try:
        dal.delete(session, uid)
        session.commit()
    except:
        session.rollback()
        raise
    finally:
        session.close()
        Session.remove()
    return "User data deleted", status.HTTP_202_ACCEPTED
def predict():
    """ Predicts the text label of every value in the given list of
    unlabeled text. """
    # Authenticate the caller from the posted Firebase id_token.
    try:
        uid = verify_id_token(request.form['id_token'])
    except KeyError:
        return "id_token required", status.HTTP_400_BAD_REQUEST
    except ValueError:
        return "id_token unrecognized", status.HTTP_400_BAD_REQUEST
    except auth.AuthError as exc:
        if exc.code == 'ID_TOKEN_REVOKED':
            return "id_token revoked", status.HTTP_400_BAD_REQUEST
        return "id_token invalid", status.HTTP_400_BAD_REQUEST
    # Parse the mapping of keys -> unlabeled text values.
    try:
        unlabeled_text = json.loads(request.form['unlabeled_text'])
    except KeyError:
        return "unlabeled_text required", status.HTTP_400_BAD_REQUEST
    except ValueError:
        return "unlabeled_text unrecognized", status.HTTP_400_BAD_REQUEST
    session = Session()
    try:
        predicted_labels = classifier.predict(session, uid,
                                              list(unlabeled_text.values()))
        session.commit()
    except:
        session.rollback()
        raise
    finally:
        session.close()
        Session.remove()
    # Pair each original key with its predicted label.
    predictions = {key: label
                   for key, label in zip(unlabeled_text, predicted_labels)}
    return json.dumps(predictions), status.HTTP_200_OK
def main():
    """Create the schema, then refresh video data (views + new videos)."""
    Base.metadata.create_all(engine)
    session = Session()
    fetcher = VideoFetcher('a', 10)
    fetcher.get_new_views_for_existing_videos(session)
    fetcher.get_new_videos(session)
    session.commit()
async def timer_empty_handler(message: Message):
    """Reply with the chat's current default timer (bare /timer command)."""
    session = Session()
    minutes = get_chat(session, message.chat.id).default_time
    session.commit()
    session.close()
    await message.reply(f"Times: {minutes} minutes", reply=False)
def add_user():
    """Create a User from query-string args (name, age, dni, country).

    Returns 'User added' on success, or the error text on failure.
    """
    session = None
    try:
        # generate database schema
        Base.metadata.create_all(engine)
        # create a new session
        session = Session()
        user = User(request.args.get('name'),
                    request.args.get('age'),
                    request.args.get('dni'),
                    request.args.get('country'))
        session.add(user)
        session.commit()
        return 'User added'
    except Exception as error:
        # Same contract as before: report the failure as the body text.
        return str(error)
    finally:
        # The old code leaked the session whenever add/commit raised;
        # always close it if it was created.
        if session is not None:
            session.close()
async def iterate_companies(self, session: database.Session):
    """Advance every company one tick: pay shareholders of companies whose
    tick succeeded, bankrupt the ones that failed (except 'DFLT')."""
    for index_company, company in enumerate(session.query(Company).all()):
        res = company.iterate()
        if res:
            self.stock_increase.append(True)
            shares = session.query(
                database.Shares).filter_by(company_id=company.id)
            for share in shares:
                user = session.query(User).get(share.user_id)
                # Payout amount is delegated to the user model.
                cost = user.passive_income(company=company, session=session)
                # cost = math.ceil(share.amount*company.stock_price)
                # user = session.query(User).get(share.user_id)
                # income_percent = 0.10
                # total_shares = sum(share.amount for share in user.shares if share.company_id == company.id)*company.stock_price
                # income_percent -= (total_shares/5_000)/100
                # cost = math.ceil(cost * max(income_percent, 0.01))
                await self.api.upgraded_add_points(user, cost, session)
                session.commit()
        else:
            # A failed tick bankrupts the company; the placeholder 'DFLT'
            # company must always survive.
            if not company.abbv == 'DFLT':
                company.bankrupt = True
                self.names[company.abbv] = company.full_name
                session.commit()
    self.start_company_events(session)
async def timer_handler(message: Message):
    """Set the chat's default pin delay: '/timer [minutes]'."""

    def is_integer(token):
        # Optional sign followed by digits only.
        return bool(re.match("^[-+]?[0-9]+$", token))

    parts = message.text.split(' ')
    if len(parts) < 2 or parts[1].replace(' ', '') == '' \
            or is_integer(parts[1]) is False:
        await message.reply(
            "Incorrect input\n"
            "/timer [mins], mins >= 0, mins <= sys.maxint",
            reply=False)
        return
    minutes = int(parts[1])
    if sys.maxsize <= minutes:
        await message.reply("Number should not be larger than int",
                            reply=False)
        return
    if minutes < 0:
        await message.reply("Number should not be less than 0", reply=False)
        return
    session = Session()
    chat = get_chat(session, message.chat.id)
    chat.default_time = minutes
    session.commit()
    session.close()
    await message.reply(f"Timer is set to {minutes} minutes", reply=False)
def insert_test_values():
    """Populate the database with 100 random users, addresses and blog posts."""
    from database import Session
    from database.objects import User, Address, BlogPost, Keyword
    from random import randint
    letters = "abcdefghijklmnopqrstuvwxyz"
    s = Session()
    keyword = Keyword("firstpost")
    for n in range(100):
        # Random 4-10 letter name.
        name = "".join(letters[randint(0, len(letters) - 1)]
                       for _ in range(randint(4, 10)))
        user = User(name=name, fullname=name, password="******")
        # The original (redacted) literals had no %s placeholder, so
        # "..." % name raised TypeError at runtime; use the name as the
        # mailbox part instead.
        user.addresses = [
            Address(email_address=("%s@example.com" % name)),
            Address(email_address=("%s@test.example.com" % name)),
        ]
        post = BlogPost(("%ss Blog Post" % name), "This is a test", user)
        post.keywords.append(Keyword(name))
        post.keywords.append(keyword)
        s.add(post)
    s.commit()
    s.close()
def count(id):
    """Fetch the task's page, count keyword matches and store a Results row."""
    logger.info(f'Adding task for id: {id}')
    session = Session()
    task = session.query(Tasks).filter_by(id=id).first()
    # Default zero-count result, stored when scraping fails entirely.
    res = Results(address=task.address, words_count=0, http_status_code=0)
    try:
        scrpr = Scrapper(task.address)
    except Exception:
        # Narrowed from a bare `except:` which would also swallow
        # KeyboardInterrupt/SystemExit.
        scrpr = None
    if scrpr:
        err = scrpr.get_page()
        if not err:
            task.http_status_code, matches = scrpr.count_matches()
            task.task_status = 'FINISHED'
            res = Results(address=task.address,
                          words_count=matches,
                          http_status_code=task.http_status_code)
        else:
            print(err)
    session.add(res)
    session.commit()
    logger.info(task)
    logger.info(res)
def get_announcements(course, url):
    '''Gets all new announcements (Python 2 generator).

    Yields (timestamp, message) pairs for announcements not yet stored,
    persisting each new one to the database.

    NOTE(review): the `url` parameter is unused -- the endpoint is
    hard-coded below; confirm intent.
    '''
    session = Session()
    try:
        r = s.get('https://edux.pjwstk.edu.pl/Announcements.aspx',
                  stream=True)
        r.raise_for_status()
        new_announcements = extract_announcements(r.content)
        # All pairs of (timestamp, message) are saved to db
        # if they arent there already
        for (timestamp, message) in new_announcements:
            announcement = session.query(Announcement). \
                filter_by(course=course, created_at=timestamp,
                          message=message). \
                first()
            if announcement is None:
                # This is what we care about
                announcement = Announcement(
                    course=course, created_at=timestamp, message=message)
                session.add(announcement)
                print u'New announcement at {0}'.format(timestamp)
                yield (timestamp, message)
        session.commit()
    except Exception:
        session.rollback()
        raise
    finally:
        session.close()
async def callback_add_handler(callback: CallbackQuery):
    """Handle the 'join queue' inline button: append the caller to the
    queue and refresh the queue message."""
    queue_id = int(callback.data.split("-")[1])
    session = Session()
    existing = session.query(QueueRecord).filter(
        QueueRecord.queue_id == queue_id,
        QueueRecord.user_id == callback.from_user.id).first()
    if existing:
        session.close()
        await bot.answer_callback_query(callback.id,
                                        "You are already in the list")
        return
    queue = session.query(Queue).filter(Queue.id == queue_id).first()
    # Next free position = current number of records + 1.
    position = len(
        session.query(QueueRecord).filter(
            QueueRecord.queue_id == queue_id).all()) + 1
    user = get_user(session, callback.from_user)
    session.add(QueueRecord(queue_id=queue_id,
                            user_id=callback.from_user.id,
                            position=position))
    session.commit()
    # Rebuild the queue message text from all records.
    lines = [f"{queue.title}\n\nLine:"]
    for entry in session.query(QueueRecord).filter(
            QueueRecord.queue_id == queue_id).all():
        lines.append(f"{entry.position}. {entry.user.user_name}")
    text = "\n".join(lines)
    await bot.answer_callback_query(callback.id, "Entered")
    await bot.edit_message_text(text,
                                queue.chat_id,
                                queue.message_id,
                                reply_markup=get_keyboard(queue))
    session.close()
def send_message(recipent=Form(...),
                 header=Form(...),
                 body=Form(...),
                 user=Depends(manager),
                 db: database.Session = Depends(database.get_db)):
    """Store a message for *recipent* and echo it back to the sender."""
    if not user:
        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED,
                            detail='Not authenticated')
    # Limits aligned with the user-facing messages (the old check rejected
    # headers at 25 chars while claiming a 100-char limit).
    if len(header) > 100:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail='Message header cannot be more than 100 characters')
    if len(body) > 1000:
        # `detial=` was a typo: HTTPException takes `detail`, so this
        # branch used to crash with TypeError instead of returning 400.
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail='Message body cannot be more than 1000 characters')
    to = db.query(database.User).filter_by(username=recipent).first()
    if to is None:
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST,
                            detail='Recipent of message not found')
    db_message = database.Messages(sent_at=time.time(),
                                   sent_by=user.username,
                                   recipent=recipent,
                                   header=header,
                                   body=body)
    db.add(db_message)
    db.commit()
    return models.Message(sent_at=time.time(),
                          sent_by=user.username,
                          recipent=recipent,
                          header=header,
                          body=body)
def update_listing(id: int,
                   title: str = Form(...),
                   context: str = Form(...),
                   user=Depends(manager),
                   db: database.Session = Depends(database.get_db)):
    """Update a listing's title/context; only its author may edit it."""
    listing = db.query(database.Listing).filter_by(id=id).first()
    # `is None` rather than `== None` (PEP 8; also avoids SQLAlchemy's
    # overloaded equality on model instances).
    if listing is None:
        raise HTTPException(status.HTTP_404_NOT_FOUND,
                            detail='Requested listing not found')
    if len(title) >= 100:
        raise HTTPException(status.HTTP_403_FORBIDDEN,
                            detail='Title too long')
    if len(context) >= 1000:
        raise HTTPException(status.HTTP_403_FORBIDDEN,
                            detail='Context too long')
    if listing.author != user.username:
        raise HTTPException(status.HTTP_401_UNAUTHORIZED,
                            detail='Not authorized to update this listing')
    listing.title = title
    listing.context = context
    db.commit()
    return models.Listing(post_id=listing.id,
                          title=listing.title,
                          author=listing.author,
                          context=listing.context)
def register_view(self):
    """Render/handle the registration form; create the user and log in."""
    form = RegistrationForm(request.form)
    if request.method == 'POST' and form.validate():
        user = Users()
        user.username = form.username.data
        user.email = form.email.data
        user.roles = ','.join(form.role.data)
        # we hash the users password to avoid saving it as plaintext in
        # the db, remove to use plain text:
        user.password = generate_password_hash(form.password.data)
        Session.add(user)
        Session.commit()
        login.login_user(user)
        flash('Thanks for registering')
        return redirect(url_for('.index'))
    link = ('<p>Already have an account? <a href="' + url_for('.login_view')
            + '">Click here to log in.</a></p>')
    # Populate the role selector from the roles stored in the database.
    form.role.choices = [(role.key, role.name)
                         for role in Session.query(Role).all()]
    self._template_args['form'] = form
    self._template_args['link'] = link
    return super(MyAdminIndexView, self).index()
def submit(self, traj):
    """ Submit a job to the work-queue for further sampling.

    Parameters
    ----------
    traj : models.Trajectory
        NOTE(review): assumed to be an unsubmitted Trajectory ORM object
        with forcefield/mode populated -- confirm with callers.
    """
    if traj.submit_time is not None:
        raise ValueError("This traj has already been submitted")
    Session.add(traj)
    Session.flush()
    # Filenames depend on the DB-assigned id, hence the flush above.
    traj.populate_default_filenames()
    if not hasattr(traj, "init_pdb"):
        raise ValueError("Traj is supposed to have a pdb object tacked on")
    save_file(traj.init_pdb_fn, traj.init_pdb)
    # Names the files will have on the remote worker.
    remote_driver_fn = os.path.split(str(traj.forcefield.driver))[1]
    remote_pdb_fn = "input.pdb"
    remote_output_fn = "production_dry{}".format(traj.forcefield.output_extension)
    if traj.mode is None or traj.forcefield is None:
        raise ValueError("malformed traj")
    task = Task(
        "python ./{driver} {pdb_fn} {ff} {water} {mode} {threads}".format(
            pdb_fn=remote_pdb_fn,
            mode=traj.mode,
            driver=remote_driver_fn,
            ff=traj.forcefield.name,
            water=traj.forcefield.water,
            threads=traj.forcefield.threads,
        )
    )
    # why does traj.forcefield.driver come out as unicode?
    task.specify_input_file(str(traj.forcefield.driver), remote_driver_fn)
    task.specify_output_file(traj.wqlog_fn, "logs/driver.log")
    task.specify_input_file(traj.init_pdb_fn, remote_pdb_fn)
    task.specify_output_file(traj.dry_xtc_fn, remote_output_fn)
    if self.return_wet_xtc:
        # this is the XTC file with waters, generated by the driver
        # when you're doing implicit solvent only, this stuff is not used.
        remote_wet_output_fn = "production_wet{}".format(
            traj.forcefield.output_extension)
        task.specify_output_file(traj.wet_xtc_fn, remote_wet_output_fn)
        task.specify_output_file(traj.last_wet_snapshot_fn,
                                 "last_wet_snapshot.pdb")
    else:
        logger.debug("Not requesting production_wet%s from driver (implicit)",
                     traj.forcefield.output_extension)
    task.specify_tag(str(traj.id))
    task.specify_algorithm(WORK_QUEUE_SCHEDULE_FILES)  # what does this do?
    traj.submit_time = datetime.now()
    # need to do a commit from this the qmaster, since this is a different
    # session
    Session.commit()
    self.wq.submit(task)
    logger.info("Submitted to queue: %s", traj)
def setup_base():
    """Recreate the User/Address tables and seed a single sample user."""
    Base.metadata.drop_all(bind=db,
                           tables=[User.__table__, Address.__table__])
    Base.metadata.create_all(db)
    session = Session()
    session.add(User(name="Mike", age=44))
    session.commit()
    session.close()
def echo_controller(request):
    """Persist the request's text as a Message and echo the payload back."""
    data = request.get('data')
    session = Session()
    session.add(Message(data=data.get('text')))
    session.commit()
    session.close()
    return make_response(request, 200, data)
def addfav():
    """Append the posted track to the end of the user's first playlist."""
    data = request.get_json()
    # The user's oldest (lowest-id) playlist acts as the favourites list.
    fav_list = Session.query(playlist).filter(
        playlist.userid == session['userid']).order_by(playlist.id).first()
    max_order = Session.query(func.max(playlist_item.order)).filter(
        playlist_item.listid == fav_list.id).first()[0]
    max_order = max_order or 0  # empty playlist -> MAX() is NULL/None
    Session.add(playlist_item(listid=fav_list.id,
                              musicid=data['id'],
                              order=max_order + 1))
    Session.commit()
    return "Success"
def index(self, fullname=None, username=None, password=None):
    """On POST, create a user from the form fields and redirect to '/'."""
    if cherrypy.request.method == "POST":
        session = Session()
        session.add(User(username, fullname, password))
        session.commit()
        session.close()
        raise cherrypy.HTTPRedirect("/")
def save_cate(cate_name, cate_id):
    """Insert a new Categories row, rolling back the session on failure."""
    try:
        Session.add(Categories(category_name=cate_name,
                               category_id=cate_id))
        Session.commit()
    except Exception as exc:
        Session.rollback()
        raise exc
def login_check(user_name):
    """Return the User with the given nickname, or None if none exists."""
    session = Session()
    user = session.query(User).filter(
        or_(User.nickname == user_name)).first()
    # (leftover debug `print session` removed -- it was also
    # Python-2-only syntax)
    session.commit()
    if not user:
        return None
    return user
def save_album(album_name, album_id, album_cover):
    """Insert a new Albums row, rolling back the session on failure."""
    try:
        Session.add(Albums(album_name=album_name,
                           album_id=album_id,
                           album_cover=album_cover))
        Session.commit()
    except Exception as exc:
        Session.rollback()
        raise exc
def add_user(self, name, address):
    """Create a User with one Address and broadcast it; skip empty fields."""
    # Guard clause: both fields must be non-empty.
    if name == '' or address == '':
        return
    session = Session()
    new_user = User(name=name,
                    fullname='%s test' % name,
                    password='******')
    new_user.addresses = [Address(email_address=address)]
    session.add(new_user)
    session.commit()
    self.send_user(new_user)
    session.close()
def save_wallpaper(title, file_name, image_url, id=None, category=None,
                   album_id=None, album_name=None, username=None):
    """Insert a Wallpapers row from the given fields; roll back on error.

    NOTE(review): `file_name` is accepted but never stored -- confirm intent.
    """
    try:
        new_row = Wallpapers(title=title,
                             image_url=image_url,
                             id=id,
                             category_id=category,
                             user_id=username,
                             album_id=album_id,
                             album_name=album_name)
        Session.add(new_row)
        Session.commit()
    except Exception as exc:
        Session.rollback()
        raise exc
def add_forcefields_to_db(self, p):
    """Insert the forcefield records in *p* into the DB, but only when the
    forcefield table is still empty (first run).  Python 2 module."""
    if Session.query(models.Forcefield).count() == 0:
        # add forcefields
        for ff in p:
            obj = models.Forcefield(**ff)
            # Resolve the driver script relative to the parameters directory.
            obj.driver = os.path.join(self.params_dir, ff['driver'])
            Session.add(obj)
        Session.commit()
    else:
        print "NOTE: I'M NOT PARSING NEW FORCEFIELDS"
def load(file_path):
    """Parse an XML text file and load its sections, ngrams and vector
    space into the database."""
    if not file_path.endswith('xml'):
        print('Not an XML file:' + file_path)
        # The old `pass` here was a bug: execution fell through and tried
        # to parse the non-XML file anyway.  Bail out instead.
        return
    if file_path.endswith('DIY.xml'):
        author, title, sections = parse_xml(open(file_path))
    else:
        author, title, sections = parse_perseus(open(file_path), 'div1')
    session = Session()
    a = get_or_create(session, Author, name=author)
    session.commit()
    t = get_or_create(session, Text, name=title, author=a.id)
    session.commit()
    global_ngrams = session.query(GlobalNgrams).first()
    section_count = 1
    log('Loading: ' + t.name)
    # Optionally flatten the source's own divisions into one blob, then
    # re-chunk it at a fixed length.
    if not USE_ORIGINAL_DIVISIONS:
        sections = [' '.join(sections)]
    if DIVISION_LENGTH:
        sections = create_chunks(sections[0], DIVISION_LENGTH)
    for sec in sections:
        temp_section = get_or_create(session, Section, source_text=t.id,
                                     number=section_count)
        log('Loading section ' + str(section_count))
        session.commit()
        temp_section_content = get_or_create(session, SectionContent,
                                             section=temp_section.id,
                                             content=sec)
        log('Creating ngrams of ' + str(section_count))
        temp_section_ngrams = get_or_create(
            session, SectionNgrams, section=temp_section.id,
            ngrams=generate_ngrams(temp_section_content.content))
        log('Updating global ngram counts.')
        update_global_counts(session, global_ngrams,
                             temp_section_ngrams.ngrams)
        section_count = section_count + 1
        session.commit()
    update_vector_space(session, global_ngrams)
class Factory(object):
    """Thin repository wrapper binding one Session to one model class."""

    def __init__(self, model):
        # One dedicated session per factory instance.
        self.session = Session()
        self.model = model

    def fetch_all(self):
        """Return every row of the bound model."""
        query = self.session.query(self.model)
        return query.all()

    def insert(self, obj, commit=True):
        """Add *obj*; commit immediately unless commit=False."""
        self.session.add(obj)
        if commit:
            self.commit()

    def commit(self):
        """Flush pending changes to the database."""
        self.session.commit()
def get_folders(course):
    '''Gets all folders

    Scrapes the Folder page, stores any folders not yet in the DB (with a
    notification) and updates/notifies when a stored folder's metadata
    changed.
    '''
    session = Session()
    try:
        r = s.get('https://edux.pjwstk.edu.pl/Folder.aspx')
        r.raise_for_status()
        new_folders = extract_folders(r.content)
        for (folder_id, title, start_at, finish_at) in new_folders:
            folder = session.query(Folder). \
                filter_by(folder_id=folder_id). \
                first()
            if folder is None:
                # First time we see this folder: store it and notify.
                folder = Folder(
                    folder_id=folder_id,
                    course=course,
                    title=title,
                    start_at=start_at,
                    finish_at=finish_at)
                send_notify('New folder "{}" at {}'.format(title, course.title),
                            '''Folder title: {0.title}
Start at: {0.start_at}
Finish at: {0.finish_at}'''.format(folder))
                session.add(folder)
            # Metadata changed since last scrape (no-op for rows created
            # just above, whose fields already match).
            if (folder.title != title or folder.start_at != start_at
                    or folder.finish_at != finish_at):
                new = {
                    'title': title,
                    'start_at': start_at,
                    'finish_at': finish_at
                }
                send_notify('Folder "{0}" updated'.format(title),
                            '''Folder title: {new[title]} (old: {0.title})
Start at: {new[start_at]} (old: {0.start_at})
Finish at: {new[finish_at]} (old: {0.finish_at})'''.format(folder, new=new))
                folder.title = title
                folder.start_at = start_at
                folder.finish_at = finish_at
                session.add(folder)
        session.commit()
    finally:
        session.close()
def load(file_):
    """Load CVE rows from a CSV file, enriching each with NVD CVSS details.

    NOTE(review): column layout assumed as 0 = CVE id, 1 = product,
    2 = bounty amount (e.g. "$1,000") -- confirm against the data source.
    """
    nvdxml = utilities.NvdXml()
    session = Session()
    reader = csv.reader(file_)
    next(reader, None)  # Ignoring the header
    for row in reader:
        debug(row)
        cve = Cve(id=row[0], year=utilities.get_year(row[0]), product=row[1])
        nvd_details = nvdxml.get_details(cve.id)
        if nvd_details:
            # Copy the CVSS vector components from the NVD record.
            cve.cvss = Cvss()
            cve.cvss.access_complexity = nvd_details['access-complexity']
            cve.cvss.access_vector = nvd_details['access-vector']
            cve.cvss.authentication = nvd_details['authentication']
            cve.cvss.availability_impact = nvd_details['availability-impact']
            cve.cvss.confidentiality_impact = nvd_details[
                'confidentiality-impact'
            ]
            cve.cvss.integrity_impact = nvd_details['integrity-impact']
            cve.cvss.score = nvd_details['score']
            cve.cvss.exploitability_subscore = nvd_details[
                'exploitability-subscore'
            ]
            cve.cvss.impact_subscore = nvd_details[
                'impact-subscore'
            ]
            cve.bounty = Bounty()
            # Strip currency formatting ("$1,234" -> 1234.0).
            cve.bounty.amount = float(row[2].replace('$', '').replace(',', ''))
            session.add(cve)
            # Commit per row so one duplicate doesn't poison the batch.
            try:
                session.commit()
            except sqlalchemy.exc.IntegrityError as e:
                error('{} is a duplicate.'.format(cve.id))
                session.rollback()
        else:
            warning('{} was not found in NVD.'.format(cve.id))
def main():
    """Interactively edit a Reading's text (Python 2 script).

    Opens the reading in an external editor with helper comment lines
    (quote, reference bible text), then writes the cleaned, comment-stripped
    text back to the database if the user confirms.
    """
    reading_id = int(sys.argv[1])
    session = Session()
    bible_query = BibleQuery()
    reading = session.query(Reading).filter(Reading.id == reading_id).one()
    text = reading.text if reading.text is not None else ""
    editor = Editor()
    # Fix wrong quotation marks
    text = re.sub(ur'"([a-zA-ZàòùèéìÒÀÙÈÉÌ0-9])', ur'“\1', text, count=0)
    text = re.sub(ur'([a-zA-ZàòùèéìÒÀÙÈÉÌ0-9\.?!])"', ur'\1”', text, count=0)
    # From http://stackoverflow.com/questions/15120346/emacs-setting-comment-character-by-file-extension
    PrependStream(editor.tempfile, '# ').write(u'-*- coding: utf-8; comment-start: "#"; -*-\n')
    PrependStream(editor.tempfile, '# ').write(u'Quote: %s\n' % (reading.quote))
    editor.tempfile.write(u'\n')
    editor.tempfile.write(text)
    editor.tempfile.write(u'\n')
    PrependStream(editor.tempfile, '# ').write(u'Useful characters: “”–\n\n')
    # Append the reference bible text as comment lines, when retrievable.
    try:
        converted_quote = convert_quote_psalm_numbering(reading.quote, False)
        bible_text = bible_query.get_text(decode_quote(converted_quote, allow_only_chap=True))
    except:
        PrependStream(editor.tempfile, '# ').write(u'Quote: %s\nCould not retrieve bible text\n' % (reading.quote))
        print decode_quote(reading.quote, allow_only_chap=True)
        raise
    else:
        bible_text = "\n".join(map(lambda x: x.strip(), bible_text.split('\n')))
        PrependStream(editor.tempfile, '# ').write(u'Quote: %s\nConverted quote: %s\nBible text:\n\n%s' % (reading.quote, converted_quote, bible_text))
    editor.edit()
    # Keep only non-comment lines as the new reading text.
    new_text = u''.join(filter(lambda x: not x.startswith(u'#'), editor.edited_content)).strip() + u'\n'
    if editor.confirmation_request(new_text != reading.text):
        reading.text = new_text
        session.commit()
    else:
        session.rollback()
def musicstream ():
    """Serve a streaming link for the requested track.

    Creates a per-user symlink to the track's .flv file under /tmp/flvs,
    increments the track's play count and posts a feed entry.  Returns
    JSON {'flv': <symlink name>}, with an empty name when the source
    file does not exist.
    """
    data = request.get_json()
    music1 = Session.query(music).filter(music.id==data['id']).one()
    # Source path is derived from the stored filename with its extension
    # replaced by .flv.
    origin = os.getcwd() + "/WebMusicPlayer/static/music/" + music1.filename[:-4] +".flv"
    #time hash : hashlib.md5(str(datetime.today())).hexdigest()
    # Per-user, per-request unique link name: "<userid>_<md5(now)>.flv".
    flvfile = unicode(session['userid']) +"_"+hashlib.md5(str(datetime.today())).hexdigest() + ".flv"
    link = "/tmp/flvs/" + flvfile
    if os.path.isfile(origin):
        #subprocess.call(["rm /tmp/flvs/"+ unicode(session['userid']) +"_*.flv"],shell=True)
        subprocess.call(["ln","-s",origin,link])
        json_data = dict (flv = flvfile)
        # NOTE(review): read-modify-write increment — not atomic under
        # concurrent requests for the same track; confirm acceptable.
        Session.query(music).filter(music.id==data['id']).update({"count": music1.count+1})
        Session.add(feed(title=session['realname'],text =(session['realname'] + u"님이 "+music1.name+u"을 들었습니다."), userid=session['userid']))
        Session.commit()
    else :
        json_data = dict (flv = "")
        print "File Not Found : " + origin
    return jsonify(json_data)
def dellistitem():
    """Delete one playlist entry, identified by the (listid, order) pair
    taken from the request's JSON body.  Always responds with "Success".
    """
    payload = request.get_json()
    doomed = Session.query(playlist_item).filter(
        playlist_item.order == payload['order'],
        playlist_item.listid == payload['listid'],
    )
    doomed.delete()
    Session.commit()
    return "Success"
def run():
    """Connect to the configured IRC server, join every channel stored in
    the database and log all channel messages (plus any URLs contained in
    them) to the database.

    Runs forever; the socket is closed on the way out.
    """
    channels = Session.query(Channel).all()
    channel_map = {}
    config['CHAN'] = []
    for channel in channels:
        config['CHAN'].append(channel.name)
        channel_map[channel.name] = channel.id
    s = None
    try:
        s = socket.socket()
        s.connect((config['HOST'], config['PORT']))
        s.send("NICK %s\r\n" % config['NICK'])
        s.send("USER %s %s bla :%s\r\n" % (config['IDEN'], config['HOST'], config['REAL']))
        buf = ""
        flag = 1  # flag for connected loop
        # Operate until it tells us we have connected
        while flag:
            buf = buf + s.recv(1024)
            tmp = string.split(buf, "\n")
            # Keep the trailing partial line in buf for the next recv.
            buf = tmp.pop()
            for line in tmp:
                line = string.rstrip(line)
                line = string.split(line)
                if line[0] == "PING":
                    s.send("PONG %s\r\n" % line[1])
                if line[1] == "004":
                    # Numeric 004 marks completed registration; JOIN is
                    # only sent after it arrives.
                    for chan in config['CHAN']:
                        s.send("JOIN %s\r\n" % chan)
                    flag = False  # Loop break
        # Operate whenever socket received
        while 1:
            buf = buf + s.recv(1024)
            tmp = string.split(buf, "\n")
            buf = tmp.pop()
            for line in tmp:
                line = string.rstrip(line)
                line = string.split(line)
                # print line
                # Pong when Ping arrived
                if line[0] == "PING":
                    s.send("PONG %s\r\n" % line[1])
                # Message from Channels
                if line[1] == "PRIVMSG":
                    sender = string.split(line[0], "!")[0][1:]  # User who told
                    obj = line[2]  # Channel(User) where(who) message happened(received)
                    msg = " ".join(line[3:])[1:]  # Message
                    if obj == config['NICK']:
                        # Direct message to the bot itself: ignored.
                        pass
                    else:
                        log = Log(sender, channel_map[obj], msg, datetime.now())
                        Session.add(log)
                        Session.commit()
                        # Store every URL mentioned in the message.
                        for m in msg.split(' '):
                            tmp = urlparse(m)
                            if tmp.scheme in ['http', 'https', 'ftp']:
                                link = Link(log, tmp.geturl())
                                Session.add(link)
                                Session.commit()
    finally:
        s.close()
def import_from_scrape(year, month):
    """Import scraped masses and readings for every day of (year, month).

    For each day that has a winning event and no valid mass yet, the
    scraped HTML is read from scrape/YYYY-MM-DD.html, a Mass with its
    Readings is created and flushed to the session; a single commit is
    performed at the end.  Raises when the number of scraped readings
    does not match any known liturgy layout.
    """
    lit_years = {}
    session = Session()
    for day in real_itermonthdays(year, month):
        date = datetime.date(year, month, day)
        print >> sys.stderr, "Importing %s..." % (date)
        lit_date = get_lit_date(date, lit_years, session)

        # Check if we already have a mass here
        try:
            lit_date.get_masses(strict=False)
        except SelectingMassException:
            pass
        else:
            print >> sys.stderr, " * skipping because a valid mass already exists"
            continue

        winner = lit_date.get_winner(remove_ok=True)
        if winner is None:
            print >> sys.stderr, " * skipping because there are no masses today"
            continue
        event = winner[1]
        with open(os.path.join('scrape', '%04d-%02d-%02d.html' % (year, month, day))) as fhtml:
            quotes = scrape_file(fhtml)

        # Tag the event so it is recognisable as scraper-generated.
        if u'auto' not in event.status.split(u' '):
            event.status += u' auto'

        mass = Mass()
        mass.order = 0
        mass.event = event
        mass.digit = lit_date.digit
        mass.letter = lit_date.letter
        mass.title = None
        mass.status = u'auto'
        session.add(mass)

        order = 0
        # The number of scraped quotes determines which liturgy layout
        # this day uses and therefore which titles the readings get.
        if len(quotes) == 4:
            titles = [u'Prima lettura', u'Salmo responsoriale', u'Seconda lettura', u'Vangelo']
        elif len(quotes) == 3:
            titles = [u'Prima lettura', u'Salmo responsoriale', u'Vangelo']
        # Domenica delle Palme
        elif len(quotes) == 5:
            titles = [u'Vangelo delle Palme', u'Prima lettura', u'Salmo responsoriale', u'Seconda lettura', u'Vangelo']
        # Pasqua
        elif len(quotes) == 17:
            titles = [u'Prima lettura', u'Salmo responsoriale', u'Seconda lettura', u'Salmo responsoriale', u'Terza lettura', u'Salmo responsoriale', u'Quarta lettura', u'Salmo responsoriale', u'Quinta lettura', u'Salmo responsoriale', u'Sesta lettura', u'Salmo responsoriale', u'Settima lettura', u'Salmo responsoriale', u'Epistola', u'Salmo responsoriale', u'Vangelo']
        else:
            raise Exception('Strange number of readings (%d)' % (len(quotes)))

        for (quote, text), title in zip(quotes, titles):
            reading = Reading()
            reading.order = order
            order += 1
            reading.alt_num = 0
            reading.mass = mass
            reading.title = title
            reading.quote = canonicalise_quote(quote)
            reading.text = text
            # Quotes that fail to decode are kept but flagged invalid.
            try:
                decode_quote(quote, allow_only_chap=True, valid_abbr=ABBR_VATICAN)
            except:
                reading.quote_status = u'auto invalid'
            else:
                reading.quote_status = u'auto'
            if text is None:
                reading.text_status = u'missing'
            else:
                reading.text_status = u'auto'
            session.add(reading)

        session.flush()

        # Write some interesting things
        #print '#'
        #print_lit_date(lit_date, PrependStream(sys.stdout, '# '))
        #print
        #print json.dumps(event.as_dict(), encoding='utf-8', ensure_ascii=False, indent=2, sort_keys=True)
        #print

    session.commit()
    session.close()
def edit_month(year, month, single_day=None):
    """Interactively edit the liturgical events of a month (or one day).

    Every day's events are dumped as JSON into an editor buffer together
    with a commented human-readable header; on save the JSON is parsed
    back and applied to the session.  Parsing errors offer a re-edit
    loop, and the final commit happens only after user confirmation.
    """
    session = Session()
    bible_query = BibleQuery()
    lit_years = {}
    editor = Editor()

    # From http://stackoverflow.com/questions/15120346/emacs-setting-comment-character-by-file-extension
    PrependStream(editor.tempfile, '# ').write(u'-*- coding: utf-8; comment-start: "#"; -*-\n')
    editor.tempfile.write(u'\n')

    def push_day(day):
        # Append one day's commented header plus its machine-editable
        # JSON payload, terminated by a '---===---' separator line.
        date = datetime.date(year, month, day)
        lit_date = get_lit_date(date, lit_years, session)
        events = map(lambda x: x[1], lit_date.competitors)
        print_lit_date(lit_date, PrependStream(editor.tempfile, u'# '), with_id=True)
        editor.tempfile.write(u'\n')
        editor.tempfile.write(json.dumps(map(lambda x: x.as_dict(), events), ensure_ascii=False, indent=2, sort_keys=True) + u'\n')
        editor.tempfile.write(u'---===---\n')
        editor.tempfile.write(u'\n')

    if single_day is not None:
        push_day(single_day)
    else:
        for day in real_itermonthdays(year, month):
            push_day(day)
    editor.edit()

    while True:
        lines = filter(lambda x: not x.startswith(u'#'), editor.edited_content)
        buf = u''
        try:
            for line in lines:
                if line.strip() == u'---===---':
                    # Separator reached: apply the accumulated JSON.
                    data = json.loads(buf)
                    for piece in data:
                        from_dict(piece, session)
                    buf = u''
                else:
                    buf += line
            session.flush()
        except:
            traceback.print_exc()
            sys.stdout.write("Error while parsing new content. Re-edit? [Y/n] ")
            answer = sys.stdin.readline().strip()
            if answer != '':
                answer = answer[0]
            if answer == 'n' or answer == 'N':
                sys.stdout.write("Aborting\n")
                sys.exit(0)
            else:
                sys.stdout.write("Re-editing...\n")
                session.rollback()
                # Reopen a fresh editor pre-filled with the previous edit
                # so no typed content is lost.
                edited_content = editor.edited_content
                editor = Editor()
                editor.tempfile.write("".join(edited_content))
                editor.edit()
        else:
            break

    if editor.confirmation_request(session_has_pending_commit(session)):
        #reading.text = new_text
        session.commit()
    else:
        session.rollback()
def load_user(user_id):
    """Fetch the User with the given primary key (or None if absent)."""
    db = Session()
    user_pk = int(user_id)
    found = db.query(User).get(user_pk)
    db.commit()
    return found
# Seed the Professor table from a pickled {department: [name, ...]} map,
# then sanity-check one department by printing its rows.
from models import Professor, User, Comment
import pickle

dept_map = pickle.load(open("dept_map.p", "rb"))
session = Session()
departments = dept_map.keys()
for department in departments:
    for name in dept_map[department]:
        sections = name.split(" ")
        first = sections[0]
        # A token containing '.' is treated as the middle initial.
        middle = filter(lambda x: "." in x, sections)
        if len(middle) != 0:
            middle = middle[0]
        else:
            middle = None
        # Everything after the first token that is not an initial is
        # joined with '-' to form the last name.
        others = filter(lambda x: not "." in x, sections[1:])
        last = "-".join(others)
        session.add(Professor(first_name=sections[0], middle_name=middle, last_name=last, department=department))
session.commit()

# Spot-check: this department name contains a non-breaking space (\xa0).
query = session.query(Professor).filter_by(department= u'Industrial Engineering\xa0Professors').all()
for q in query:
    print q.first_name, q.middle_name, q.last_name, q.department
def get_quiz(course):
    '''Navigates to quiz
    Gets all quiz

    Scrapes the quiz page for the given course, stores previously unseen
    quizzes, and sends a notification for every new quiz and for every
    quiz whose attributes changed since the last run.  The session is
    rolled back on any error and always closed.
    '''
    session = Session()
    try:
        r = s.get('https://edux.pjwstk.edu.pl/Quiz.aspx')
        r.raise_for_status()
        # quiz = []
        for (quiz_id, title, start_at, finish_at, duration, score) in extract_quiz(r.content):
            quiz = session.query(Quiz). \
                filter_by(quiz_id=quiz_id). \
                first()
            if quiz is None:
                # First time this quiz is seen: store it and notify.
                quiz = Quiz(
                    course=course,
                    quiz_id=quiz_id,
                    title=title,
                    start_at=start_at,
                    finish_at=finish_at,
                    duration=duration,
                    score=score
                )
                session.add(quiz)
                print u'New quiz "{0}" {1} - {2}'.format(
                    quiz.title, quiz.start_at, quiz.finish_at)
                send_notify(u'Quiz "{0.title}" at {1.title}'.format(quiz, course),
                            u'''Quiz title: {0.title}
Course: {1.title}
Start: {0.start_at}
Finish: {0.finish_at}
Duration: {0.duration}
Score: {0.score}
'''.format(quiz, course))
            if (quiz.title != title or quiz.start_at != start_at
                    or quiz.finish_at != finish_at
                    or quiz.duration != duration or quiz.score != score):
                # Stored attributes differ from the scraped ones: notify
                # about the change, then overwrite with the new values.
                send_notify(u'Quiz "{0.title}" changed'.format(quiz, course),
                            u'''Quiz title: {new[title]} (old: {0.title})
Course: {1.title}
Start: {new[start_at]} (old: {0.start_at})
Finish: {new[finish_at]} (old: {0.finish_at})
Duration: {new[duration]} (old: {0.duration})
Score: {new[score]} (old: {0.score})
'''.format(quiz, course, new={'title': title, 'start_at': start_at, 'finish_at': finish_at, 'duration': duration, 'score': score}))
                quiz.title = title
                quiz.start_at = start_at
                quiz.finish_at = finish_at
                quiz.duration = duration
                quiz.score = score
                session.add(quiz)
        session.commit()
    except Exception:
        session.rollback()
        raise
    finally:
        session.close()
def get_courses():
    '''Navigates to Premain
    Gets all courses

    Scrapes the course list, stores unseen courses, then visits each
    course page and dispatches to the announcement/quiz/folder scrapers
    depending on which features the page links to.  Finally sends one
    summary notification with all newly collected announcements.
    '''
    session = Session()
    r = s.get('https://edux.pjwstk.edu.pl/Premain.aspx')
    r.raise_for_status()
    new_announcements = []
    for i, (course_id, name, url) in enumerate(extract_courses(r.content)):
        course = session.query(Course). \
            filter_by(course_id=course_id). \
            first()
        if course is None:
            print u'Add new course "{}"'.format(name)
            course = Course(
                course_id=course_id,
                title=name)
            session.add(course)
        print course.title
        # Get inside the course
        r = s.get(url)
        r.raise_for_status()
        # Detach the course object; the per-feature scrapers below use
        # their own sessions.
        session.expunge(course)
        course_content = r.content
        if 'Announcements.aspx' in course_content:
            print 'There are announcements'
            # Get announcement for this course
            for (timestamp, announcement) in get_announcements(course, url):
                new_announcements.append((course.title, timestamp, announcement))
        if 'Quiz.aspx' in course_content:
            print 'There are quizes'
            get_quiz(course)
        if 'Folder.aspx' in course_content:
            print 'There are folders'
            get_folders(course)

    # Prepare email stuff from gathered data
    subject = 'You have {0} new announcements on EDUX'.format(
        len(new_announcements))
    body = u''
    # Sort new announcements so highest date (newer) will be on top
    sorted_announcements = sorted(new_announcements,
                                  key=operator.itemgetter(1),
                                  reverse=True)
    # TODO: Use some templating here
    for i, (course, timestamp, announcement) in enumerate(sorted_announcements, 1):
        body += u'{0}. {1} at {2}\n{3}\n\n'.format(
            i, timestamp, course, announcement)
    # Cant send empty body because mailgun throws HTTP400s.
    send_notify(subject, body)
    session.commit()