def register_view(self):
    """Render the registration form; on a valid POST create the user,
    log them in and redirect to the index.
    """
    form = RegistrationForm(request.form)
    # BUG FIX: role choices must be populated BEFORE form.validate() —
    # WTForms rejects any submitted value not present in `choices`, so
    # setting them after validation made every role submission fail.
    form.role.choices = [(role.key, role.name)
                         for role in Session.query(Role).all()]
    if request.method == 'POST' and form.validate():
        user = Users()
        user.username = form.username.data
        user.email = form.email.data
        user.roles = ','.join(form.role.data)
        # we hash the users password to avoid saving it as plaintext in the db,
        # remove to use plain text:
        user.password = generate_password_hash(form.password.data)
        Session.add(user)
        Session.commit()
        login.login_user(user)
        flash('Thanks for registering')
        return redirect(url_for('.index'))
    link = '<p>Already have an account? <a href="' + url_for('.login_view') + '">Click here to log in.</a></p>'
    self._template_args['form'] = form
    self._template_args['link'] = link
    return super(MyAdminIndexView, self).index()
def process_program(self, id: int) -> Optional[str]:
    """Recompute and persist the rating for one Program, then remove it
    from the planned set in the Mongo state collection.

    :param id: primary key of the Program to process.
    :return: status string, or None when the program does not exist.
    """
    session = Session()
    try:
        program = session.query(Program).filter(Program.id == id).one_or_none()
        if not program:
            return None
        state_db = Mongo(STATE_COLL)
        rating = 0
        for param in program.parameters:
            rating += param['weight'] * int(param['value'])  # TODO: type processing
        program.rating = int(rating)  # round score
        session.add(program)
        session.commit()
        state_db.update_one({'key': PROCESS_KEY}, {'$pull': {'planned': id}})
        # NOTE: the original also created Mongo(STATS_COLL) here but never
        # used it — removed as dead code (confirm no connection side effect
        # was intended).
        return f'New rating for program <{id}>: {rating}'
    finally:
        # Always release the session, even on early return or error.
        session.close()
def add_user_controller(request):
    """Create a User from the request payload and acknowledge with 200.

    :param request: dict-like request with 'name' and 'password' keys.
    :return: prepared response object.
    """
    name = request.get('name')
    password = request.get('password')
    # SECURITY NOTE(review): the password is persisted as-is (plaintext);
    # it should be hashed before storage — flagged, not silently changed.
    session = Session()
    try:
        session.add(User(name=name, password=password))
        session.commit()
    finally:
        # The original leaked the session; always close it.
        session.close()
    return make_response(request, 200, f'User {name} created')
def insert_test_values():
    """Seed the database with 100 random users, each with two addresses
    and a blog post tagged with their name and a shared keyword.
    """
    from database import Session
    from database.objects import User, Address, BlogPost, Keyword
    from random import randint
    letters = "abcdefghijklmnopqrstuvwxyz"
    s = Session()
    keyword = Keyword("firstpost")
    for _ in range(100):
        # Random 4-10 letter lowercase name (join beats += concatenation).
        name = "".join(letters[randint(0, len(letters) - 1)]
                       for _ in range(randint(4, 10)))
        user = User(name=name, fullname=name, password="******")
        # BUG FIX: the original applied `% name` to a literal containing no
        # %s placeholder, which raises TypeError ("not all arguments
        # converted") at runtime.  The literal appears redacted — TODO
        # confirm the intended address format.
        user.addresses = [
            Address(email_address="*****@*****.**"),
            Address(email_address="*****@*****.**"),
        ]
        post = BlogPost(("%ss Blog Post" % name), "This is a test", user)
        post.keywords.append(Keyword(name))
        post.keywords.append(keyword)
        s.add(post)
    s.commit()
    s.close()
def update_author(id):
    """Update an author's id/name from the request JSON, re-pointing all
    of their book links at the new id.  400 on bad input or duplicate id.
    """
    author = Session.query(Author).filter(Author.id == id)
    if not request.json or not author.first():
        abort(400)
    # Remember which books were linked before dropping the old links.
    links = Session.query(AuthorBookLink).filter(
        AuthorBookLink.author_id == author.first().id)
    connected_books_ids = [link.book_id for link in links]
    links.delete()
    try:
        author.update({
            'id': request.json['id'],
            'name': request.json['name'],
            'updated_at': datetime.now()
        })
    except IntegrityError:
        Session.rollback()
        return 'Id already exists', 400
    # Recreate each link against the (possibly new) author id.
    for book_id in connected_books_ids:
        Session.add(
            AuthorBookLink(author_id=request.json['id'], book_id=book_id))
    Session.commit()
    return 'OK', 200
async def create_handler(message: Message):
    """Handle `/create [text]`: create a Queue pinned at the chat's
    default delay from now (seconds zeroed), then confirm to the user.
    """
    title = message.text[8:]
    if title.replace(" ", "") == "":
        await message.reply("Empty title. Use /create [text]\n", reply=False)
        return
    session = Session()
    time = datetime.now()
    chat = get_chat(session, message.chat.id)
    time += timedelta(minutes=chat.default_time)
    # BUG FIX: the original wrote timedelta(time.second), which interprets
    # the seconds value as DAYS (timedelta's first positional argument).
    # The intent is to strip the seconds component of the timestamp.
    time -= timedelta(seconds=time.second)
    queue = Queue(creator_id=message.from_user.id,
                  message_id=message.message_id,
                  pin_date=time,
                  title=title,
                  chat_id=message.chat.id)
    session.add(queue)
    session.commit()
    session.close()
    await message.reply(
        f"{title}\n\nPublication time: {time.strftime('%H:%M, %d.%m.%Y')}",
        reply=False)
def update_stock_master():
    """Refresh the stock master table (종목 DB 갱신) from the KRX listing,
    adding any listed stock not already known, and report via Slack.
    """
    session = None
    try:
        session = Session()
        start_time = time.time()
        # Use a set: membership tests in the loop are O(1) instead of O(n).
        stock_master = {
            stock[0] for stock in session.query(종목_마스터.거래소코드).all()
        }
        affected_rows = 0
        for stock in krx.get_stock_list():
            if stock['short_code'].endswith(
                    '0') and stock['full_code'] not in stock_master:
                session.add(
                    종목_마스터(stock['marketName'], stock['short_code'][1:],
                            stock['codeName'], stock['full_code']))
                affected_rows += 1
        if affected_rows > 0:
            session.commit()
            slack.send_message(
                'BATCH:update_stock_master success {}건 업데이트'.format(
                    affected_rows))
        execution_time = time.time() - start_time
        logger.info('execution_time: {}'.format(execution_time))
        logger.info('{} rows added'.format(affected_rows))
    except Exception as e:
        logger.exception(e)
        slack.send_message('BATCH:update_stock_master fail {}'.format(e))
    finally:
        # Guard: if Session() itself failed, `session` would otherwise be
        # unbound here and raise NameError inside finally.
        if session is not None:
            session.close()
def sign_up():
    """Handle the sign-up form: reject duplicate names, persist the new
    user, and redirect with a flash message on every outcome.
    """
    form = SignUpForm()
    user = User()
    if form.validate_on_submit():
        user_name = request.form.get('user_name')
        user_email = request.form.get('user_email')
        if User.login_check(user_name):
            flash("error: The user's name already exists!")
            return redirect('/sign-up')
        if len(user_name) and len(user_email):
            user.nickname = user_name
            user.email = user_email
            try:
                session = Session()
                session.add(user)
                session.commit()
            # BUG FIX: was a bare `except:` which also swallows
            # SystemExit/KeyboardInterrupt; narrowed to Exception.
            except Exception:
                flash("The Database error!")
                return redirect('/sign-up')
            flash("Sign up successful!")
            return redirect('/index')
    return render_template(
        "sign_up.html",
        form=form)
def add_user():
    """Create a User from query-string parameters and persist it.

    Returns 'User added' on success, or the stringified error otherwise.
    """
    try:
        # generate database schema (no-op for tables that already exist)
        Base.metadata.create_all(engine)
        session = Session()
        try:
            user = User(request.args.get('name'),
                        request.args.get('age'),
                        request.args.get('dni'),
                        request.args.get('country'))
            session.add(user)
            session.commit()
        finally:
            # FIX: the original leaked the session when add/commit raised;
            # close it on every path.
            session.close()
        return 'User added'
    except Exception as error:
        return str(error)
def send_message(recipent=Form(...),
                 header=Form(...),
                 body=Form(...),
                 user=Depends(manager),
                 db: database.Session = Depends(database.get_db)):
    """Store a message from the authenticated user to `recipent` and
    return its model representation.  Raises 401/400 on bad input.
    """
    if not user:
        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED,
                            detail='Not authenticated')
    elif len(header) >= 25:
        # FIX: message text said 100 but the enforced limit is 25.
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail='Message header cannot be more than 25 characters')
    elif len(body) >= 1000:
        # BUG FIX: was `detial=` — a silently-ignored kwarg, so the client
        # received no detail text at all.
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail='Message body cannot be more than 1000 characters')
    else:
        to = db.query(database.User).filter_by(username=recipent).first()
        if to is None:  # idiom: identity comparison with None
            raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST,
                                detail='Recipent of message not found')
        else:
            db_message = database.Messages(sent_at=time.time(),
                                           sent_by=user.username,
                                           recipent=recipent,
                                           header=header,
                                           body=body)
            db.add(db_message)
            db.commit()
            return models.Message(sent_at=time.time(),
                                  sent_by=user.username,
                                  recipent=recipent,
                                  header=header,
                                  body=body)
def get_announcements(course, url):
    '''Gets all new announcements

    Yields (timestamp, message) pairs for announcements not yet stored
    for `course`, persisting each new one.  (Generator — results are
    produced lazily; the commit happens once iteration completes.)
    '''
    session = Session()
    try:
        # NOTE(review): `s` is a module-level requests session; `url` is
        # unused here — confirm whether the hard-coded URL is intended.
        r = s.get('https://edux.pjwstk.edu.pl/Announcements.aspx',
                  stream=True)
        r.raise_for_status()
        new_announcements = extract_announcements(r.content)
        # All pairs of (timestamp, message) are saved to db
        # if they arent there already
        for (timestamp, message) in new_announcements:
            announcement = session.query(Announcement). \
                filter_by(course=course, created_at=timestamp,
                          message=message). \
                first()
            if announcement is None:
                # This is what we care about
                announcement = Announcement(
                    course=course, created_at=timestamp, message=message)
                session.add(announcement)
                print u'New announcement at {0}'.format(timestamp)
                yield (timestamp, message)
        session.commit()
    except Exception:
        # Undo any partially-added announcements before re-raising.
        session.rollback()
        raise
    finally:
        session.close()
async def callback_add_handler(callback: CallbackQuery):
    """Add the tapping user to the queue referenced by the callback data,
    then redraw the queue message with the updated line.
    """
    queue_id = int(callback.data.split("-")[1])
    session = Session()
    record = session.query(QueueRecord).filter(
        QueueRecord.queue_id == queue_id,
        QueueRecord.user_id == callback.from_user.id).first()
    if record:
        session.close()
        await bot.answer_callback_query(callback.id,
                                        "You are already in the list")
        return
    queue = session.query(Queue).filter(Queue.id == queue_id).first()
    # PERF FIX: count in the database instead of materialising every row
    # just to take len() of the list.
    position = session.query(QueueRecord).filter(
        QueueRecord.queue_id == queue_id).count() + 1
    # get_user ensures the sender exists in the users table (side effect).
    user = get_user(session, callback.from_user)
    session.add(
        QueueRecord(queue_id=queue_id,
                    user_id=callback.from_user.id,
                    position=position))
    session.commit()
    text = f"{queue.title}\n\nLine:"
    # Renamed loop variable: the original shadowed `record` from above.
    for entry in session.query(QueueRecord).filter(
            QueueRecord.queue_id == queue_id).all():
        text += f"\n{entry.position}. {entry.user.user_name}"
    await bot.answer_callback_query(callback.id, "Entered")
    await bot.edit_message_text(text,
                                queue.chat_id,
                                queue.message_id,
                                reply_markup=get_keyboard(queue))
    session.close()
def update_book(id):
    """Update a book's id/title from the request JSON and rebuild its
    author links.  400 on bad input, duplicate id or unknown author.
    """
    book = Session.query(Book).filter(Book.id == id)
    if not request.json or not book.first():
        abort(400)
    Session.query(AuthorBookLink).filter(
        AuthorBookLink.book_id == request.json['id']).delete()
    authors = request.json['authors_id']
    try:
        book.update({
            'id': request.json['id'],
            'title': request.json['title'],
            'updated_at': datetime.now()
        })
    except IntegrityError:
        Session.rollback()
        return 'Id currently exists', 400
    for author_id in authors:
        # BUG FIX: the original tested `if not author:` on the Query object,
        # which is always truthy, so the missing-author branch never ran.
        # Test the fetched row instead, and return a proper 400 status
        # (the original fell through to Flask's default 200).
        author = Session.query(Author).filter(
            Author.id == author_id).first()
        if not author:
            Session.rollback()
            return 'No author with id %i' % author_id, 400
        Session.add(
            AuthorBookLink(author_id=author_id,
                           book_id=request.json['id']))
    Session.commit()
    return 'OK', 200
def count(id):
    """Run the scraping task `id`: fetch its page, count word matches,
    and persist a Results row (zeroed on any failure).
    """
    logger.info(f'Adding task for id: {id}')
    session = Session()
    task = session.query(Tasks).filter_by(id=id).first()
    # Default result in case scraping fails at any stage.
    res = Results(address=task.address, words_count=0, http_status_code=0)
    try:
        scrpr = Scrapper(task.address)
    # BUG FIX: was a bare `except:`; narrow and record why it failed
    # instead of silently discarding the error.
    except Exception:
        logger.exception(f'Scrapper init failed for {task.address}')
        scrpr = None
    if scrpr:
        err = scrpr.get_page()
        if not err:
            task.http_status_code, matches = scrpr.count_matches()
            task.task_status = 'FINISHED'
            res = Results(address=task.address,
                          words_count=matches,
                          http_status_code=task.http_status_code)
        else:
            # FIX: was print(); use the module logger like the rest of
            # this function.
            logger.error(err)
    session.add(res)
    session.commit()
    logger.info(task)
    logger.info(res)
    # FIX: the original never closed the session.
    session.close()
def post(self):
    """Create a Note from the request JSON and return it serialized.

    Returns 400 when no JSON body is supplied, 422 on validation
    failure, and 201 with the stored note on success.
    """
    payload = request.get_json()
    if not payload:
        return {'message': 'No input data provided'}, 400
    # Validate and deserialize the input.
    try:
        data, errors = note_schema.load(payload)
        if not data.get('title') or not data.get('text'):
            raise ValidationError('Data not provided')
    except ValidationError as err:
        return err.messages, 422
    db_session = Session()
    new_note = Note(id=str(uuid.uuid4()),
                    title=data.get('title'),
                    text=data.get('text'),
                    date_create=int(time.time()),
                    date_update=int(time.time()))
    db_session.add(new_note)
    db_session.commit()
    # Re-read the row so the response reflects exactly what was stored.
    result, errors = note_schema.dump(
        db_session.query(Note).filter(Note.id == new_note.id).first())
    db_session.close()
    return result, 201
def submit(self, traj):
    """
    Submit a job to the work-queue for further sampling.

    Parameters
    ----------
    traj : database trajectory object
        Must not have been submitted before; expected to carry a
        forcefield, a mode and an `init_pdb` structure.
    """
    if traj.submit_time is not None:
        raise ValueError("This traj has already been submitted")
    # Persist first so populate_default_filenames() can use the row's id.
    Session.add(traj)
    Session.flush()
    traj.populate_default_filenames()

    if not hasattr(traj, "init_pdb"):
        raise ValueError("Traj is supposed to have a pdb object tacked on")
    save_file(traj.init_pdb_fn, traj.init_pdb)

    # Filenames as seen on the remote worker.
    remote_driver_fn = os.path.split(str(traj.forcefield.driver))[1]
    remote_pdb_fn = "input.pdb"
    remote_output_fn = "production_dry{}".format(
        traj.forcefield.output_extension)

    if traj.mode is None or traj.forcefield is None:
        raise ValueError("malformed traj")

    task = Task(
        "python ./{driver} {pdb_fn} {ff} {water} {mode} {threads}".format(
            pdb_fn=remote_pdb_fn,
            mode=traj.mode,
            driver=remote_driver_fn,
            ff=traj.forcefield.name,
            water=traj.forcefield.water,
            threads=traj.forcefield.threads,
        )
    )

    # why does traj.forcefield.driver come out as unicode?
    task.specify_input_file(str(traj.forcefield.driver), remote_driver_fn)
    task.specify_output_file(traj.wqlog_fn, "logs/driver.log")
    task.specify_input_file(traj.init_pdb_fn, remote_pdb_fn)
    task.specify_output_file(traj.dry_xtc_fn, remote_output_fn)

    if self.return_wet_xtc:
        # this is the XTC file with waters, generated by the driver
        # when you're doing implicit solvent only, this stuff is not used.
        remote_wet_output_fn = "production_wet{}".format(
            traj.forcefield.output_extension)
        task.specify_output_file(traj.wet_xtc_fn, remote_wet_output_fn)
        task.specify_output_file(traj.last_wet_snapshot_fn,
                                 "last_wet_snapshot.pdb")
    else:
        logger.debug("Not requesting production_wet%s from driver (implicit)",
                     traj.forcefield.output_extension)

    # Tag with the db id so results can be matched back to this traj.
    task.specify_tag(str(traj.id))
    task.specify_algorithm(WORK_QUEUE_SCHEDULE_FILES)  # what does this do?

    traj.submit_time = datetime.now()

    # need to do a commit from this the qmaster, since this is a different
    # session
    Session.commit()
    self.wq.submit(task)
    logger.info("Submitted to queue: %s", traj)
def setup_base():
    """Drop and recreate the User/Address tables, then seed one user."""
    Base.metadata.drop_all(bind=db, tables=[User.__table__, Address.__table__])
    Base.metadata.create_all(db)
    seed_user = User(name="Mike", age=44)
    db_session = Session()
    db_session.add(seed_user)
    db_session.commit()
    db_session.close()
def echo_controller(request):
    """Persist the incoming message text and echo the payload back."""
    payload = request.get('data')
    db = Session()
    db.add(Message(data=payload.get('text')))
    db.commit()
    db.close()
    return make_response(request, 200, payload)
def save_cate(cate_name, cate_id):
    """Insert a Categories row; roll back and re-raise on any failure."""
    try:
        Session.add(Categories(category_name=cate_name,
                               category_id=cate_id))
        Session.commit()
    except Exception as exc:
        Session.rollback()
        raise exc
def save_album(album_name, album_id, album_cover):
    """Insert an Albums row; roll back and re-raise on any failure."""
    try:
        # Local renamed from the original's misleading `category`.
        album = Albums(album_name=album_name,
                       album_id=album_id,
                       album_cover=album_cover)
        Session.add(album)
        Session.commit()
    except Exception as exc:
        Session.rollback()
        raise exc
def index(self, fullname=None, username=None, password=None):
    """On POST, persist a new User; always redirect back to '/'."""
    if cherrypy.request.method == "POST":
        db = Session()
        db.add(User(username, fullname, password))
        db.commit()
        db.close()
    raise cherrypy.HTTPRedirect("/")
def addfav():
    """Append the posted track to the end of the user's first playlist."""
    payload = request.get_json()
    # The user's oldest playlist is treated as the favourites list.
    fav_list = Session.query(playlist).filter(
        playlist.userid == session['userid']).order_by(playlist.id).first()
    # Highest existing order value, or 0 for an empty list.
    top = Session.query(func.max(playlist_item.order)).filter(
        playlist_item.listid == fav_list.id).first()[0] or 0
    Session.add(playlist_item(listid=fav_list.id,
                              musicid=payload['id'],
                              order=top + 1))
    Session.commit()
    return "Success"
def add_user(self, name, address):
    """Create and broadcast a demo User; no-op when either field is empty."""
    if name == '' or address == '':
        return
    db = Session()
    new_user = User(name=name, fullname='%s test' % name, password='******')
    new_user.addresses = [Address(email_address=address), ]
    db.add(new_user)
    db.commit()
    self.send_user(new_user)
    db.close()
def save_wallpaper(title, file_name, image_url, id=None, category=None,
                   album_id=None, album_name=None, username=None):
    """Insert a Wallpapers row; roll back and re-raise on any failure.

    NOTE(review): `file_name` is accepted but never persisted — confirm
    whether it should be stored on the Wallpapers row.
    """
    try:
        Session.add(Wallpapers(title=title,
                               image_url=image_url,
                               id=id,
                               category_id=category,
                               user_id=username,
                               album_id=album_id,
                               album_name=album_name))
        Session.commit()
    except Exception as exc:
        Session.rollback()
        raise exc
def add_client(login, info):
    """Persist a new Client row."""
    db = Session()
    # Create the new record.  (Comments translated from Russian.)
    row = Client(login, info)
    db.add(row)
    # Commit the transaction.
    db.commit()
def add_history(client_id, ip):
    """Persist a ClientHistory entry (used for testing)."""
    db = Session()
    # Create the new record.  (Comments translated from Russian.)
    row = ClientHistory(client_id, ip)
    db.add(row)
    # Commit the transaction.
    db.commit()
def add_author():
    """Create an Author from the request JSON.

    Aborts with 400 when the body is missing or an author with the
    given id already exists.
    """
    payload = request.json
    if not payload or Session.query(Author).filter(
            Author.id == payload['id']).first():
        abort(400)
    Session.add(Author(id=payload['id'],
                       name=payload['name'],
                       created_at=datetime.now()))
    Session.commit()
    return 'OK', 200
def _get_or_create_last_refresh(cls, session: Session):
    """Return the singleton LastRefresh row, creating it when absent.

    The created row is only staged on the session — committing is the
    caller's responsibility.
    """
    existing = session.query(LastRefresh).first()
    if existing:
        return existing
    created = LastRefresh(
        id=1,
        timestamp=timestamp_now()
    )
    session.add(created)
    return created
def stage_images_in_database(images: list, collection_record: CollectionRecord,
                             session: Session):
    """
    Stage every image of a collection in the database as an ImageRecord.

    :param images: list of Image objects.
    :param collection_record: CollectionRecord the images belong to.
    :param session: database session.
    """
    records = (ImageRecord.from_image(img, collection_record)
               for img in images)
    for rec in records:
        session.add(rec)
def create_item(address: str):
    """Validate `address`, persist it as a Tasks row and enqueue the
    counting job.  Raises 422 on a bad URL or a duplicate row.
    """
    # Guard clause: reject malformed URLs up front.
    if not URL_REGEX.match(address):
        raise HTTPException(status_code=422, detail='Unprocessable Entity')
    task = Tasks(address=address, timestamp=datetime.utcnow())
    session = Session()
    try:
        session.add(task)
        session.commit()
    except IntegrityError:
        # FIX: the original re-raised without cleaning up, leaving the
        # session in a failed transaction state.
        session.rollback()
        session.close()
        raise HTTPException(status_code=422, detail='Unprocessable Entity')
    # Hand the persisted task id to the background worker.
    count.delay(task.id)
def stage_collection_in_database(collection: Collection,
                                 session: Session) -> CollectionRecord:
    """
    Stage a Collection in the database as a CollectionRecord.

    :param collection: Collection object.
    :param session: database session.
    :return: the staged CollectionRecord.
    """
    record = CollectionRecord.from_collection(collection)
    session.add(record)
    # Flush (without committing) so database-generated fields are populated.
    session.flush()
    return record
def save_query(self):
    """Rebuild the schema and persist every course in self.data as a
    Lesson row.
    """
    Base.metadata.drop_all(bind=engine)
    Base.metadata.create_all(bind=engine)
    # FIX: bail out BEFORE opening a session — the original created the
    # session first and returned without closing it when data was empty.
    if not self.data:
        return
    session = Session()
    for course in self.data:
        # SECURITY NOTE(review): eval() on fetched data executes arbitrary
        # code; json.loads / ast.literal_eval should be preferred —
        # flagged, not silently changed.
        lesson = Lesson(course['start'], course['end'], course['title'],
                        eval(course['txt'])['JSMC'])
        session.add(lesson)
    session.commit()
    session.close()
def main():
    """Build the zoo: create the schema, seed random animals, wire up
    random friendships, then search for a path from the hungriest lion
    to the slowest antelope through the friendship graph.
    """
    models.Base.metadata.create_all(engine)
    logging.info('Created database schema.')
    session = Session()
    add_random_animals_of_type_to_session(session, create_lion,
                                          config.LION_COUNT)
    add_random_animals_of_type_to_session(session, create_hippopotamus,
                                          config.HIPPOPOTAMUS_COUNT)
    add_random_animals_of_type_to_session(session, create_antelope,
                                          config.ANTELOPE_COUNT)
    add_random_animals_of_type_to_session(session, create_hyena,
                                          config.HYENA_COUNT)
    all_animals = session.query(models.Animal).all()
    friendship_count = 0
    # Keep pairing random animals until enough friendships are formed.
    # NOTE(review): loops forever if no eligible pair exists — confirm
    # config guarantees compatibility.
    while friendship_count < config.MAX_FRIENDSHIP_COUNT:
        random_animal_1 = all_animals[randint(0, len(all_animals) - 1)]
        random_animal_2 = all_animals[randint(0, len(all_animals) - 1)]
        if can_become_friends(random_animal_1, random_animal_2):
            logging.info('Creating friendship between: {}, {}'.format(
                random_animal_1, random_animal_2))
            # Friendship is symmetric: record it on both sides.
            random_animal_1.friends.append(random_animal_2)
            random_animal_2.friends.append(random_animal_1)
            session.add(random_animal_1)
            session.add(random_animal_2)
            friendship_count += 1
    session.commit()
    hungriest_lion = get_hungriest_lion(session)
    logging.info('Hungriest lion is: {}'.format(hungriest_lion))
    slowest_antelope = get_slowest_antelope(session)
    logging.info('Slowest antelope is: {}'.format(slowest_antelope))
    # Build an undirected graph of friendships keyed by animal id.
    graph = Graph()
    for animal in all_animals:
        for friend in animal.friends:
            graph.add_edge(animal.id, friend.id)
    try:
        shortest_path = find_path(graph, hungriest_lion.id,
                                  slowest_antelope.id, cost_func=get_cost)
    except NoPathError:
        logging.warn('The lion stays hungry today')
    else:
        logging.info(
            'Shortest path to slowest antelope is through animal ids: {}'.
            format(shortest_path[0]))
async def create_user(User: UserCreate,
                      db: Session = Depends(get_db),
                      current_user: User = Depends(get_current_active_user)):
    """Create a user from the posted payload with a hashed password.

    NOTE(review): the body parameter `User` shadows the module-level
    User class (the `current_user` annotation still resolves at def
    time); renaming it would be clearer — flagged only, to keep the
    signature unchanged.
    """
    try:
        user = UserModel(**User.dict())
        user.password = encryptPassword(user.password)
        db.add(user)
        db.commit()
        db.refresh(user)
    except SQLAlchemyError as e:
        # FIX: leave the session clean before propagating; chain the
        # original cause so the traceback isn't lost.
        db.rollback()
        raise Exception(e) from e
    return user
def echo_controller(request):
    """
    Function for writing down to DB users message and gives back a response
    :param request: raw request from client
    :return: prepared response
    """
    data = request.get('data')
    session = Session()
    message = Message(content=data, user_id=1)
    session.add(message)
    session.commit()
    # FIX: the session was never closed (the sibling controllers do close
    # theirs) — release it before responding.
    session.close()
    return make_response(request, 200, data)
def update_sets(verbose=True):
    """Fetch the Scryfall set list and insert any set not yet stored.

    :param verbose: print a line for each newly added set.
    """
    sets = scryfall.Request("sets").data
    session = Session()
    # PERF FIX: use a set — the original list made each `not in` check
    # O(n), turning the loop quadratic.
    known_ids = {t[0] for t in session.query(Set.id).all()}
    for s in sets:
        if s['id'] not in known_ids:
            session.add(Set.from_scryfall(s))
            if verbose:
                print(
                    f"New set: {s['name']} ({s['code']} [{s['card_count']} cards])"
                )
    session.commit()
    session.close()
def add_forcefields_to_db(self, p):
    # Seed the forcefield table from `p` (an iterable of kwarg dicts),
    # but only when the table is empty; driver paths are resolved
    # relative to self.params_dir.  (Python 2 code — print statement.)
    if Session.query(models.Forcefield).count() == 0:
        # add forcefields
        for ff in p:
            obj = models.Forcefield(**ff)
            obj.driver = os.path.join(self.params_dir, ff['driver'])
            Session.add(obj)
        Session.commit()
    else:
        print "NOTE: I'M NOT PARSING NEW FORCEFIELDS"
async def queue_reply_handler(message: Message):
    """Add the user named in a reply message to the replied-to queue,
    enforcing one added user per adder, then redraw the queue message.
    """
    username = message.text
    username_plain = username.replace("@", "")
    session = Session()
    __ = get_user(session, message.from_user)  # update sender obj
    user = session.query(User).filter(
        User.username == username_plain).first()
    if not user:
        session.close()
        await message.reply(f"`{username}` is not found.",
                            parse_mode="Markdown")
        return
    queue = session.query(Queue).filter(
        Queue.message_id == message.reply_to_message.message_id).one()
    record = session.query(QueueRecord).filter(
        QueueRecord.user_id == user.id,
        QueueRecord.queue_id == queue.id).first()
    if record:
        session.close()
        await message.reply(f"`{username}` is already in the list.",
                            parse_mode="Markdown")
        return
    # PERF FIX (here and below): count in the database instead of
    # materialising every row just to take len() of the list.
    if session.query(QueueRecord).filter(
            QueueRecord.creator_id == message.from_user.id).count() >= 1:
        session.close()
        await message.reply("You cant add more than 1 user")
        return
    position = session.query(QueueRecord).filter(
        QueueRecord.queue_id == queue.id).count() + 1
    session.add(
        QueueRecord(queue_id=queue.id,
                    creator_id=message.from_user.id,
                    user_id=user.id,
                    position=position))
    session.commit()
    text = f"{queue.title}\n\nLine:"
    for entry in session.query(QueueRecord).filter(
            QueueRecord.queue_id == queue.id).all():
        text += f"\n{entry.position}. {entry.user.user_name}"
    await bot.edit_message_text(text,
                                queue.chat_id,
                                queue.message_id,
                                reply_markup=get_keyboard(queue))
    # FIX: the original committed a second time here — a no-op, removed.
    session.close()
    await message.reply("User added")
def main():
    """Create the schema and seed categories plus two series."""
    Base.metadata.create_all(engine)
    comedy = Category(name="Comedy")
    drama = Category(name="Drama")
    db = Session()
    db.add(comedy)
    db.add(drama)
    db.add(Category(name="Musical"))
    db.add(Serie(title="The Big Bang Theory",
                 description="Serie de frikis",
                 category=comedy))
    db.add(Serie(title="Juego de Tronos",
                 description="Todos mueren",
                 category=drama))
    db.commit()
def signup(username=Form(...),
           email=Form(...),
           password=Form(...),
           db: database.Session = Depends(database.get_db)):
    """Validate a sign-up form, hash the password and create the user.

    Raises 302 on duplicates and 403 on every validation failure.
    """
    if db.query(database.User).filter_by(username=username).first():
        raise HTTPException(status_code=status.HTTP_302_FOUND,
                            detail='Username taken')
    elif db.query(database.User).filter_by(email=email).first():
        raise HTTPException(status_code=status.HTTP_302_FOUND,
                            detail='Email address already in use')
    elif len(username) <= 3:
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN,
                            detail='Username must be longer than 3 characters')
    elif len(username) >= 25:
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail='Username is too long. Maximum length: 25 characters')
    # BUG FIX: was `< 7`, which accepted 7-character passwords while the
    # error message demands 8 or more.
    elif len(password) < 8:
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN,
                            detail='Password must be 8 characters or more')
    elif len(password) >= 40:
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail='Password is too long. Maximum length: 40 characters')
    elif PasswordStats(password).strength() <= float(0.350):
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail=
            'Password is not strong enough. Try adding some symbols or numbers your password'
        )
    elif len(email) >= 75:
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail='Email is too long. Must be less than 75 characters')
    try:
        valid = validate_email(email)
        email = valid.email
    except EmailNotValidError as e:
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail='Email address not supported. Please use another email.')
    else:
        pwd_hash = Hash.get_password_hash(str(password))
        db_user = database.User(username=username,
                                email=email,
                                password=pwd_hash)
        db.add(db_user)
        db.commit()
        db.refresh(db_user)
        return models.User(username=username, email=email, password=password)
def scrape_term(term):
    """Scrape all NYT articles matching `term` between the configured
    START_DATE and END_DATE, adaptively shrinking the date window so
    each window's hit count fits in the allowed number of pages.
    Resumes from the last stored article for the term.
    """
    log.info("Starting on: {0}".format(term))
    end_date = deepcopy(settings.END_DATE)
    start_date = deepcopy(settings.START_DATE)
    date_gap = end_date - start_date
    page = 0
    session = Session()
    last_article = session.query(Article).filter_by(term=term).order_by(
        Article.created.desc()).first()
    if last_article is not None:
        # Resume from where the previous run for this term stopped.
        start_date = last_article.start_date
        end_date = last_article.end_date
        if end_date >= settings.END_DATE:
            return
    while start_date < settings.END_DATE:
        if end_date > settings.END_DATE:
            end_date = deepcopy(settings.END_DATE)
            date_gap = end_date - start_date
        data = access_api(term, page, start_date, end_date)
        allowed_hits = (settings.MAX_HIT_PAGES * settings.HITS_PER_PAGE)
        # Shrink the window proportionally until the API hit count fits
        # within the pages we are willing to fetch.
        while data["meta"]["hits"] > allowed_hits:
            date_ratio = data["meta"]["hits"] / allowed_hits
            date_gap = date_gap / date_ratio
            end_date = deepcopy(start_date) + date_gap
            data = access_api(term, page, start_date, end_date)
        log.info("Working on date range {0} to {1}, with {2} hits.".format(
            start_date.strftime("%Y-%m-%d"), end_date.strftime("%Y-%m-%d"),
            data["meta"]["hits"]))
        page = 0
        # Page through this window, storing unseen articles.
        while page * settings.HITS_PER_PAGE < data["meta"]["hits"] and (
                page + 1) < settings.MAX_HIT_PAGES:
            data = access_api(term, page, start_date, end_date)
            for doc in data["docs"]:
                article = session.query(Article).filter_by(
                    nyt_id=doc["_id"]).first()
                if article is None:
                    published = dateutil.parser.parse(doc["pub_date"])
                    # start/end dates are stored so a later run can resume.
                    article = Article(data=json.dumps(doc),
                                      nyt_id=doc["_id"],
                                      term=term,
                                      page=page,
                                      start_date=start_date,
                                      end_date=end_date,
                                      published=published)
                    session.add(article)
            session.commit()
            page += 1
        # Slide the window forward using the last workable gap.
        start_date = end_date
        end_date = deepcopy(start_date) + date_gap
    log.info("Done with term {0}.".format(term))
class Factory(object):
    """Minimal repository: one session bound to one model class."""

    def __init__(self, model):
        self.session = Session()
        self.model = model

    def fetch_all(self):
        """Return every row of the bound model."""
        return self.session.query(self.model).all()

    def insert(self, obj, commit=True):
        """Stage ``obj``; persist immediately unless ``commit`` is False."""
        self.session.add(obj)
        if not commit:
            return
        self.session.commit()

    def commit(self):
        """Flush pending work to the database."""
        self.session.commit()
def get_folders(course):
    '''Gets all folders

    Fetches the folder page for `course`, inserts folders not yet
    stored and updates changed ones, sending a notification either way.
    '''
    session = Session()
    try:
        # NOTE(review): `s` is a module-level requests session — confirm.
        r = s.get('https://edux.pjwstk.edu.pl/Folder.aspx')
        r.raise_for_status()
        new_folders = extract_folders(r.content)
        for (folder_id, title, start_at, finish_at) in new_folders:
            folder = session.query(Folder). \
                filter_by(folder_id=folder_id). \
                first()
            if folder is None:
                folder = Folder(
                    folder_id=folder_id,
                    course=course,
                    title=title,
                    start_at=start_at,
                    finish_at=finish_at)
                send_notify('New folder "{}" at {}'.format(title, course.title),
                            '''Folder title: {0.title}
Start at: {0.start_at}
Finish at: {0.finish_at}'''.format(folder))
                session.add(folder)
            # Detect changes to an existing (or just-created) folder.
            if (folder.title != title or folder.start_at != start_at
                    or folder.finish_at != finish_at):
                new = {
                    'title': title,
                    'start_at': start_at,
                    'finish_at': finish_at
                }
                send_notify('Folder "{0}" updated'.format(title),
                            '''Folder title: {new[title]} (old: {0.title})
Start at: {new[start_at]} (old: {0.start_at})
Finish at: {new[finish_at]} (old: {0.finish_at})'''.format(folder, new=new))
                folder.title = title
                folder.start_at = start_at
                folder.finish_at = finish_at
                session.add(folder)
        session.commit()
    finally:
        session.close()
def load(file_):
    """Load CVE bounty rows from a CSV file into the database.

    Each row is (cve_id, product, bounty_amount); the header row is
    skipped.  CVSS details are fetched from the NVD per CVE; rows with
    no NVD entry are skipped with a warning, duplicates are rolled back.
    """
    nvdxml = utilities.NvdXml()
    session = Session()
    reader = csv.reader(file_)
    next(reader, None)  # Ignoring the header
    for row in reader:
        debug(row)
        cve = Cve(id=row[0], year=utilities.get_year(row[0]), product=row[1])
        nvd_details = nvdxml.get_details(cve.id)
        if nvd_details:
            cve.cvss = Cvss()
            cve.cvss.access_complexity = nvd_details['access-complexity']
            cve.cvss.access_vector = nvd_details['access-vector']
            cve.cvss.authentication = nvd_details['authentication']
            cve.cvss.availability_impact = nvd_details['availability-impact']
            cve.cvss.confidentiality_impact = nvd_details[
                'confidentiality-impact'
            ]
            cve.cvss.integrity_impact = nvd_details['integrity-impact']
            cve.cvss.score = nvd_details['score']
            cve.cvss.exploitability_subscore = nvd_details[
                'exploitability-subscore'
            ]
            cve.cvss.impact_subscore = nvd_details[
                'impact-subscore'
            ]
            cve.bounty = Bounty()
            # e.g. "$1,500" -> 1500.0
            cve.bounty.amount = float(row[2].replace('$', '').replace(',', ''))
            session.add(cve)
            # Commit per row so one duplicate doesn't discard the batch.
            try:
                session.commit()
            except sqlalchemy.exc.IntegrityError as e:
                error('{} is a duplicate.'.format(cve.id))
                session.rollback()
        else:
            warning('{} was not found in NVD.'.format(cve.id))
def musicstream():
    """Prepare a streaming symlink for the requested track.

    Looks up the track, symlinks its .flv into /tmp/flvs under a
    per-user hashed name, bumps the play counter and posts a feed
    entry.  Returns JSON {'flv': name}, with '' when the source file is
    missing.  (Python 2 code — print statement / unicode().)
    """
    data = request.get_json()
    music1 = Session.query(music).filter(music.id == data['id']).one()
    origin = os.getcwd() + "/WebMusicPlayer/static/music/" + music1.filename[:-4] + ".flv"
    # time hash : hashlib.md5(str(datetime.today())).hexdigest()
    flvfile = unicode(session['userid']) + "_" + hashlib.md5(str(datetime.today())).hexdigest() + ".flv"
    link = "/tmp/flvs/" + flvfile
    if os.path.isfile(origin):
        #subprocess.call(["rm /tmp/flvs/"+ unicode(session['userid']) +"_*.flv"],shell=True)
        subprocess.call(["ln", "-s", origin, link])
        json_data = dict(flv=flvfile)
        # Increment the play counter and record a feed entry.
        Session.query(music).filter(music.id == data['id']).update({"count": music1.count + 1})
        Session.add(feed(title=session['realname'], text=(session['realname'] + u"님이 " + music1.name + u"을 들었습니다."), userid=session['userid']))
        Session.commit()
    else:
        json_data = dict(flv="")
        print "File Not Found : " + origin
    return jsonify(json_data)
def run_round(checkdata=True):
    """Activate the builder and build new MSMs (if necessary)

    First, check to see if there is enough data are to warrant building a
    new set of MSMs. Assuming yes, do a joint clustering over all of the
    data, and then build MSMs for each forcefield on that state space.

    Parameters
    ----------
    checkdata : boolean, optional
        If False, skip the checking process

    Returns
    -------
    happened : boolean
        True if we actually did a round of MSM building, False otherwise
    """
    if checkdata:
        logger.info("Checking if sufficient data has been acquired.")
        if not is_sufficient_new_data():
            return False
    else:
        logger.info("Skipping check for adequate data.")

    # use all the data together to get the cluster centers
    generators, db_trajs = joint_clustering()

    # One MSM per forcefield, all sharing the joint state space.
    msmgroup = MSMGroup(trajectories=db_trajs)
    for ff in Session.query(Forcefield).all():
        trajs = filter(lambda t: t.forcefield == ff, db_trajs)
        msm = build_msm(ff, generators=generators, trajs=trajs)
        msmgroup.markov_models.append(msm)

    # add generators to msmgroup
    Session.add(msmgroup)
    # Flush so the group gets an id before filenames are derived from it.
    Session.flush()
    msmgroup.populate_default_filenames()
    msmgroup.trajectories = db_trajs
    msmgroup.n_states = len(generators)
    save_file(msmgroup.generators_fn, generators)

    for msm in msmgroup.markov_models:
        msm.populate_default_filenames()
        # Persist whichever artifacts this model actually produced.
        if hasattr(msm, 'counts'):
            save_file(msm.counts_fn, msm.counts)
        if hasattr(msm, 'assignments'):
            save_file(msm.assignments_fn, msm.assignments)
        if hasattr(msm, 'distances'):
            save_file(msm.distances_fn, msm.distances)
            save_file(msm.inverse_assignments_fn,
                      dict(MSMLib.invert_assignments(msm.assignments)))

    # ======================================================================#
    # HERE IS WHERE THE ADAPTIVE SAMPLING ALGORITHMS GET CALLED
    # The obligation of the adaptive_sampling routine is to set the
    # model_selection_weight on each MSM/forcefield and the microstate
    # selection weights
    # check to make sure that the right fields were populated
    try:
        Project().adaptive_sampling(Session, msmgroup)
        for msm in msmgroup.markov_models:
            if not isinstance(msm.model_selection_weight, numbers.Number):
                raise ValueError(
                    'model selection weight on %s not set correctly' % msm)
            if not isinstance(msm.microstate_selection_weights, np.ndarray):
                raise ValueError(
                    'microstate_selection_weights on %s not set correctly' % msm)
    except Exception as e:
        # Fall back to the default sampling scheme on any failure.
        logging.error('ADAPTIVE SAMPLING ERROR')
        logging.error(e)
        sampling.default(Session, msmgroup)
    #=======================================================================#

    Session.flush()
    logger.info("Round completed sucessfully")
    return True
def import_from_scrape(year, month):
    """Import scraped masses and readings for one month into the database.

    For each day of the month, skip days that already have a valid mass or
    have no scheduled masses, otherwise create a Mass from the winning
    event and attach one Reading per scraped quote. Commits once at the
    end and closes the session.

    Parameters
    ----------
    year, month : int
        The month whose 'scrape/YYYY-MM-DD.html' files will be imported.
    """
    lit_years = {}
    session = Session()
    for day in real_itermonthdays(year, month):
        date = datetime.date(year, month, day)
        print >> sys.stderr, "Importing %s..." % (date)
        lit_date = get_lit_date(date, lit_years, session)

        # Check if we already have a mass here
        try:
            lit_date.get_masses(strict=False)
        except SelectingMassException:
            pass
        else:
            print >> sys.stderr, " * skipping because a valid mass already exists"
            continue

        winner = lit_date.get_winner(remove_ok=True)
        if winner is None:
            print >> sys.stderr, " * skipping because there are no masses today"
            continue
        event = winner[1]

        with open(os.path.join('scrape', '%04d-%02d-%02d.html' % (year, month, day))) as fhtml:
            quotes = scrape_file(fhtml)

        # Tag the event as auto-imported exactly once
        if u'auto' not in event.status.split(u' '):
            event.status += u' auto'

        mass = Mass()
        mass.order = 0
        mass.event = event
        mass.digit = lit_date.digit
        mass.letter = lit_date.letter
        mass.title = None
        mass.status = u'auto'
        session.add(mass)

        order = 0
        # The number of scraped quotes determines the liturgical layout
        if len(quotes) == 4:
            titles = [u'Prima lettura', u'Salmo responsoriale', u'Seconda lettura', u'Vangelo']
        elif len(quotes) == 3:
            titles = [u'Prima lettura', u'Salmo responsoriale', u'Vangelo']
        # Domenica delle Palme
        elif len(quotes) == 5:
            titles = [u'Vangelo delle Palme', u'Prima lettura', u'Salmo responsoriale', u'Seconda lettura', u'Vangelo']
        # Pasqua
        elif len(quotes) == 17:
            titles = [u'Prima lettura', u'Salmo responsoriale',
                      u'Seconda lettura', u'Salmo responsoriale',
                      u'Terza lettura', u'Salmo responsoriale',
                      u'Quarta lettura', u'Salmo responsoriale',
                      u'Quinta lettura', u'Salmo responsoriale',
                      u'Sesta lettura', u'Salmo responsoriale',
                      u'Settima lettura', u'Salmo responsoriale',
                      u'Epistola', u'Salmo responsoriale', u'Vangelo']
        else:
            raise Exception('Strange number of readings (%d)' % (len(quotes)))

        for (quote, text), title in zip(quotes, titles):
            reading = Reading()
            reading.order = order
            order += 1
            reading.alt_num = 0
            reading.mass = mass
            reading.title = title
            reading.quote = canonicalise_quote(quote)
            reading.text = text
            # Validate the quote; mark it invalid rather than aborting.
            # FIX: narrowed bare "except:" to "except Exception:" so that
            # KeyboardInterrupt/SystemExit are no longer swallowed.
            try:
                decode_quote(quote, allow_only_chap=True, valid_abbr=ABBR_VATICAN)
            except Exception:
                reading.quote_status = u'auto invalid'
            else:
                reading.quote_status = u'auto'
            if text is None:
                reading.text_status = u'missing'
            else:
                reading.text_status = u'auto'
            session.add(reading)

        session.flush()

        # Write some interesting things
        #print '#'
        #print_lit_date(lit_date, PrependStream(sys.stdout, '# '))
        #print
        #print json.dumps(event.as_dict(), encoding='utf-8', ensure_ascii=False, indent=2, sort_keys=True)
        #print

    session.commit()
    session.close()
def run(): channels = Session.query(Channel).all() channel_map = {} config['CHAN'] = [] for channel in channels: config['CHAN'].append(channel.name) channel_map[channel.name] = channel.id s = None try: s = socket.socket() s.connect((config['HOST'], config['PORT'])) s.send("NICK %s\r\n" % config['NICK']) s.send("USER %s %s bla :%s\r\n" % (config['IDEN'], config['HOST'], config['REAL'])) buf = "" flag = 1 # flag for connected loop # Operate until it tells us we have connected while flag: buf = buf + s.recv(1024) tmp = string.split(buf, "\n") buf = tmp.pop() for line in tmp: line = string.rstrip(line) line = string.split(line) if line[0] == "PING": s.send("PONG %s\r\n" % line[1]) if line[1] == "004": for chan in config['CHAN']: s.send("JOIN %s\r\n" % chan) flag = False # Loop break # Operate whenever socket received while 1: buf = buf + s.recv(1024) tmp = string.split(buf, "\n") buf = tmp.pop() for line in tmp: line = string.rstrip(line) line = string.split(line) # print line # Pong when Ping arrived if line[0] == "PING": s.send("PONG %s\r\n" % line[1]) # Message from Channels if line[1] == "PRIVMSG": sender = string.split(line[0], "!")[0][1:] # User who told obj = line[2] # Channel(User) where(who) message happened(received) msg = " ".join(line[3:])[1:] # Message if obj == config['NICK']: pass else: log = Log(sender, channel_map[obj], msg, datetime.now()) Session.add(log) Session.commit() for m in msg.split(' '): tmp = urlparse(m) if tmp.scheme in ['http', 'https', 'ftp']: link = Link(log, tmp.geturl()) Session.add(link) Session.commit() finally: s.close()
from models import Professor, User, Comment import pickle dept_map = pickle.load(open("dept_map.p", "rb")) session = Session() departments = dept_map.keys() for department in departments: for name in dept_map[department]: sections = name.split(" ") first = sections[0] middle = filter(lambda x: "." in x, sections) if len(middle) != 0: middle = middle[0] else: middle = None others = filter(lambda x: not "." in x, sections[1:]) last = "-".join(others) session.add(Professor(first_name=sections[0], middle_name=middle, last_name=last, department=department)) session.commit() query = session.query(Professor).filter_by(department= u'Industrial Engineering\xa0Professors').all() for q in query: print q.first_name, q.middle_name, q.last_name, q.department
def get_quiz(course): '''Navigates to quiz Gets all quiz ''' session = Session() try: r = s.get('https://edux.pjwstk.edu.pl/Quiz.aspx') r.raise_for_status() # quiz = [] for (quiz_id, title, start_at, finish_at, duration, score) in extract_quiz(r.content): quiz = session.query(Quiz). \ filter_by(quiz_id=quiz_id). \ first() if quiz is None: quiz = Quiz( course=course, quiz_id=quiz_id, title=title, start_at=start_at, finish_at=finish_at, duration=duration, score=score ) session.add(quiz) print u'New quiz "{0}" {1} - {2}'.format( quiz.title, quiz.start_at, quiz.finish_at) send_notify(u'Quiz "{0.title}" at {1.title}'.format(quiz, course), u'''Quiz title: {0.title} Course: {1.title} Start: {0.start_at} Finish: {0.finish_at} Duration: {0.duration} Score: {0.score} '''.format(quiz, course)) if (quiz.title != title or quiz.start_at != start_at or quiz.finish_at != finish_at or quiz.duration != duration or quiz.score != score): send_notify(u'Quiz "{0.title}" changed'.format(quiz, course), u'''Quiz title: {new[title]} (old: {0.title}) Course: {1.title} Start: {new[start_at]} (old: {0.start_at}) Finish: {new[finish_at]} (old: {0.finish_at}) Duration: {new[duration]} (old: {0.duration}) Score: {new[score]} (old: {0.score}) '''.format(quiz, course, new={'title': title, 'start_at': start_at, 'finish_at': finish_at, 'duration': duration, 'score': score})) quiz.title = title quiz.start_at = start_at quiz.finish_at = finish_at quiz.duration = duration quiz.score = score session.add(quiz) session.commit() except Exception: session.rollback() raise finally: session.close()
def get_courses(): '''Navigates to Premain Gets all courses ''' session = Session() r = s.get('https://edux.pjwstk.edu.pl/Premain.aspx') r.raise_for_status() new_announcements = [] for i, (course_id, name, url) in enumerate(extract_courses(r.content)): course = session.query(Course). \ filter_by(course_id=course_id). \ first() if course is None: print u'Add new course "{}"'.format(name) course = Course( course_id=course_id, title=name) session.add(course) print course.title # Get inside the course r = s.get(url) r.raise_for_status() session.expunge(course) course_content = r.content if 'Announcements.aspx' in course_content: print 'There are announcements' # Get announcement for this course for (timestamp, announcement) in get_announcements(course, url): new_announcements.append((course.title, timestamp, announcement)) if 'Quiz.aspx' in course_content: print 'There are quizes' get_quiz(course) if 'Folder.aspx' in course_content: print 'There are folders' get_folders(course) # Prepare email stuff from gathered data subject = 'You have {0} new announcements on EDUX'.format( len(new_announcements)) body = u'' # Sort new announcements so highest date (newer) will be on top sorted_announcements = sorted(new_announcements, key=operator.itemgetter(1), reverse=True) # TODO: Use some templating here for i, (course, timestamp, announcement) in enumerate(sorted_announcements, 1): body += u'{0}. {1} at {2}\n{3}\n\n'.format( i, timestamp, course, announcement) # Cant send empty body because mailgun throws HTTP400s. send_notify(subject, body) session.commit()