def comment_delete(self, id):
    item = db.Review.by_id(id)
    if item is None:
        return False
    db.session().delete(item)
    db.session().commit()
    return True
def new_user(self, user, email, email_confirm, password, password_confirm):
    u = db.User.by_email(email)
    if u is not None:
        return {}, {}, {"email": [u"La dirección '%s' ya está dada de alta, escoja otra." % email]}
    errors = defaultdict(list)
    if len(user.strip()) < 4:
        errors["user"].append("El nombre de usuario debe tener al menos 4 caracteres.")
    if email != email_confirm:
        errors["email_confirm"].append("La dirección de correo y la confirmación no coinciden")
    if len(password.strip()) < 6:
        errors["password"].append("La contraseña debe tener 6 caracteres o más")
    if password != password_confirm:
        errors["password_confirm"].append("La contraseña y la confirmación deben coincidir")
    if len(errors) == 0:
        u = db.User()
        u.email = email
        u.set_password(password, self.key)
        u.name = user
        u.groups.append(db.session().query(db.CatalogEntry)
                        .filter(db.CatalogEntry.catalog_name == "user_groups")
                        .filter(db.CatalogEntry.value == "Usuarios")
                        .first())
        db.session().add(u)
        db.session().commit()
        return helpers.get(db.User).to_dictionary(u), {}, {}
    return {}, {}, dict(errors)
def review_update(self, arguments, warnings, errors):
    if arguments["id"] == "new":
        record = db.Review()
    else:
        record = db.Review.by_id(arguments["id"])
    if len(arguments) > 1:
        if arguments["name"].strip() == "":
            errors["name"].append(u"Es necesario asignar un nombre al comentario")
        if arguments["alias"].strip() == "":
            errors["alias"].append(u"Es necesario capturar un nombre de usuario")
        if len(arguments["content"].split()) < 10:
            errors["content"].append(u"El comentario debe tener al menos 10 palabras")
        if len(errors) == 0:
            helpers.get(db.Review).to_record(arguments, record)
            record.date = datetime.datetime.now()
            db.session().add(record)
            db.session().commit()
            arguments = helpers.get(db.Review).to_dictionary(record)
            # Notify every user in the "Administradores" group; joining through
            # User.groups ties the CatalogEntry filter to each user's groups.
            admins = db.session().query(db.User)\
                .join(db.User.groups)\
                .filter(db.CatalogEntry.value == "Administradores")\
                .all()
            for user in admins:
                t = {"recipient": user.name}
                t["review"] = arguments
                t["product"] = helpers.get(db.Product).to_dictionary(record.product)
                send_mail("Nuevo comentario", "*****@*****.**", [user.email],
                          "mail/new_review.txt", t)
    else:
        arguments = helpers.get(db.Review).to_dictionary(record)
    return arguments
def product_delete(self, id):
    product = db.Product.by_id(id)
    if product is None:
        return False
    db.session().delete(product)
    db.session().commit()
    return True
def pager(self, table, filter_field, filter, sort_by, descending,
          offset, limit, prefilter=[]):
    table = db.__dict__[table]

    def build_query(query):
        for field, value in prefilter:
            query = query.filter(getattr(table, field) == value)
        if filter != "":
            if type(filter_field) != list:
                # Apply a LIKE clause for every whitespace-separated term.
                for f in filter.split():
                    query = query.filter(getattr(table, filter_field).like("%%%s%%" % f))
            else:
                # filter_field is ["TableName", "column"]: filter on a related table.
                t = db.__dict__[filter_field[0]]
                for f in filter.split():
                    query = query.filter(getattr(t, filter_field[1]).like("%%%s%%" % f))
        if not descending:
            query = query.order_by(db.func.lower(getattr(table, sort_by)).asc())
        else:
            query = query.order_by(db.func.lower(getattr(table, sort_by)).desc())
        query = query.distinct()
        return query

    result = build_query(db.session().query(table))
    count = build_query(db.session().query(db.func.count(db.distinct(table.id)))).one()[0]
    result = result.limit(limit).offset(offset).all()
    result = [helpers.get(table).to_dictionary(p) for p in result]
    return result, count
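# Hypothetical call site for pager() above, showing how the arguments line up;
# the table/column names and values here are illustrative only and are not
# taken from the surrounding code.
rows, total = self.pager(
    "Product",   # table name, resolved through db.__dict__
    "name",      # filter_field: a column name, or ["OtherTable", "column"]
    "gibson",    # filter: each whitespace-separated term becomes a LIKE clause
    "name",      # sort_by
    False,       # descending
    0,           # offset
    20,          # limit
)
# `rows` is a list of dictionaries, `total` the unpaginated match count.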
def ad_delete(self, id):
    record = db.Ad.by_id(id)
    if record is None:
        return False
    db.session().delete(record)
    db.session().commit()
    return True
def product_pager(self, filter_field, filter, sort_by, descending,
                  page, limit, genre="", prefilter=[]):
    def build_query(query):
        if genre != "":
            query = query.join(db.Product.genres)
            query = query.filter("product_genre_1.genre_id = %s" % genre)
        for field, value in prefilter:
            query = query.filter(getattr(db.Product, field) == value)
        if filter != "":
            # Apply a LIKE clause for every whitespace-separated term.
            for f in filter.split():
                query = query.filter(getattr(db.Product, filter_field).like("%%%s%%" % f))
        if not descending:
            query = query.order_by(db.func.lower(getattr(db.Product, sort_by)).asc())
        else:
            query = query.order_by(db.func.lower(getattr(db.Product, sort_by)).desc())
        query = query.distinct()
        return query

    result = build_query(db.session().query(db.Product))
    count = build_query(db.session().query(db.func.count(db.distinct(db.Product.id)))).one()[0]
    result = result.limit(limit).offset(page * limit).all()
    result = [helpers.get(db.Product).to_dictionary(p) for p in result]
    return result, int(math.ceil(1.0 * count / limit))
def order_send(self, id):
    order = db.Order.by_id(id)
    if order is None:
        return False
    order.status = db.Order.ORDER_DELIVERED
    db.session().add(order)
    db.session().commit()
    return True
def update_views(self, user_id, product_id):
    user = db.User.by_id(user_id)
    product = db.Product.by_id(product_id)
    statistics = db.Statistics()
    statistics.user = user
    statistics.product = product
    statistics.view = 1
    statistics.timestamp = time.time()
    db.session().add(statistics)
    db.session().commit()
    return True
def patch(self, review_id):
    # For editing minor properties like titles.
    print flask.request.json
    review = db.session().query(model.Review).filter_by(review_id=review_id).one()
    for (k, v) in flask.request.json.iteritems():
        if hasattr(review, k):
            setattr(review, k, v)
    db.session().commit()
    return self.get(review_id)
def ad(self, ad_type):
    ad_type = db.session().query(db.CatalogEntry)\
        .filter(db.CatalogEntry.catalog_name == "ad_types")\
        .filter(db.CatalogEntry.value == ad_type)\
        .first()
    result = db.session().query(db.Ad)\
        .filter(db.Ad.ad_type == ad_type)\
        .filter(db.Ad.valid_until > datetime.datetime.now().date())\
        .filter(db.Ad.enabled == True)\
        .all()
    # Pick one of the currently valid ads at random.
    result = random.sample(result, 1)[0]
    return helpers.get(db.Ad).to_dictionary(result)
def create_file(info):
    # `review` and `git` are defined outside this snippet (module or enclosing scope).
    if info is None:
        return None
    if isinstance(info, model.File):
        return info
    (commit, path) = info
    contents = git.show_file(review.project_id, commit, path)
    result = model.File.find_or_create(db.session(), path, contents)
    db.session().add(result)
    return result
def save_binary(self, filename, content_type, content):
    content = content.decode("base64")
    hash = hashlib.md5(content).hexdigest()
    result = db.session().query(db.BinaryContent)\
        .filter(db.BinaryContent.content_type == content_type)\
        .filter(db.BinaryContent.hash == hash)\
        .first()
    if result is None:
        result = db.BinaryContent(filename, hash, content, content_type)
        db.session().add(result)
        db.session().commit()
    return result.id
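# Illustrative call for save_binary() above (Python 2, since the method relies
# on str.decode("base64")); the file name and payload are made up for the example.
payload = open("cover.png", "rb").read().encode("base64")
binary_id = self.save_binary("cover.png", "image/png", payload)
# A second call with the same bytes returns the same id: the MD5 hash lookup
# runs before a new BinaryContent row would be inserted.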
def test_sign_up(self):
    app.test_client().post('/', data={'email': '*****@*****.**'})
    users = session().query(User.email).all()
    eq_(users, [('*****@*****.**',)])
    self.visit('/')
    self.browser.fill('email', '*****@*****.**')
    self.browser.find_by_name('go').click()
    assert self.browser.is_text_present('Thanks'), 'rude!'
    users = session().query(User.email).all()
    eq_(users, [('*****@*****.**',), ('*****@*****.**',)])
def get_icon_app(id, type_, size, callback):
    with db.session():
        from item.models import Item
        item = Item.get(id)
        if not item:
            data = ''
        else:
            if type_ == 'cover' and not item.meta.get('cover'):
                type_ = 'preview'
            if type_ == 'preview' and not item.files.count():
                type_ = 'cover'
            if size:
                skey = '%s:%s:%s' % (type_, id, size)
            key = '%s:%s' % (type_, id)
            data = None
            if size:
                data = icons[skey]
                if data:
                    size = None
            if not data:
                data = icons[key]
            if not data:
                data = icons.black()
                size = None
            if size:
                data = icons[skey] = resize_image(data, size=size)
            data = bytes(data) or ''
    callback(data)
def recommendations():
    TEMPLATE_PATH.insert(0, '')
    s = session()
    # 1. Classify labeled news
    rows = s.query(News).filter(News.label != None).all()
    X, y = [], []
    for row in rows:
        X.append(row.title)
        y.append(row.label)
    X = [clean(x).lower() for x in X]
    model = NaiveBayesClassifier()
    model.fit(X, y)
    # 2. Get unlabeled news
    new_rows = s.query(News).filter(News.label == None).all()
    # 3. Get predictions
    marked = []
    for row in new_rows:
        marked.append((model.predict(row.title.split()), row))
    # 4. Print ranked table
    return template('news_ranked', rows=marked)
def products(self):
    data = db.session().query(db.Product).all()
    result = []
    for p in data:
        d = helpers.get(db.Product).to_dictionary(p)
        result.append(d)
    return result
def orders(self):
    orders = db.session().query(db.Order).all()
    orders = [helpers.get(db.Order).to_dictionary(order) for order in orders]
    print orders
    return orders
def get(self):
    results = []
    for project in db.session().query(model.Project).all():
        results.append(project.to_dict())
    return results
def check_nodes():
    if state.online:
        with db.session():
            for u in user.models.User.query.filter_by(queued=True):
                if not state.nodes.is_online(u.id):
                    logger.debug('queued peering message for %s trying to connect...', u.id)
                    state.nodes.queue('add', u.id)
def get(self, id, include_body=True):
    with db.session():
        item = Item.get(id)
        path = item.get_path() if item else None
        if not item or not path:
            self.set_status(404)
            return
        mimetype = {
            'cbr': 'application/x-cbr',
            'cbz': 'application/x-cbz',
            'epub': 'application/epub+zip',
            'pdf': 'application/pdf',
            'txt': 'text/plain',
        }.get(path.split('.')[-1], None)
        if mimetype == 'text/plain':
            try:
                open(path, 'rb').read().decode('utf-8')
                mimetype = 'text/plain; charset=utf-8'
            except:
                mimetype = 'text/plain; charset=latin-1'
        if self._attachment:
            disposition = os.path.basename(path)
        else:
            disposition = None
        return serve_static(self, path, mimetype, include_body, disposition=disposition)
def run_scan():
    remove_missing()
    prefs = settings.preferences
    prefix = os.path.join(os.path.expanduser(prefs["libraryPath"]), "Books" + os.sep)
    if not prefix[-1] == os.sep:
        prefix += os.sep
    assert isinstance(prefix, str)
    books = []
    for root, folders, files in os.walk(prefix):
        for f in files:
            if not state.tasks.connected:
                return
            # if f.startswith('._') or f == '.DS_Store':
            if f.startswith("."):
                continue
            f = os.path.join(root, f)
            ext = f.split(".")[-1]
            if ext in extensions:
                books.append(f)
    position = 0
    added = 0
    for f in ox.sorted_strings(books):
        if not state.tasks.connected:
            return
        position += 1
        with db.session():
            id = media.get_id(f)
            file = File.get(id)
            if not file:
                file = add_file(id, f, prefix, f)
                added += 1
    trigger_event("change", {})
def catalog(self, catalog_name):
    result = db.session().query(db.CatalogEntry)\
        .filter(db.CatalogEntry.catalog_name == catalog_name)\
        .all()
    return [{"id": v.id, "value": v.value} for v in result]
def command_shell(*args):
    '''
    Runs a Python shell inside the application context.
    '''
    context = None
    banner = 'Open Media Library'
    import db
    with db.session():
        # Try BPython
        try:
            from bpython import embed
            embed(banner=banner, locals_=context)
            return
        except ImportError:
            pass
        # Try IPython
        try:
            try:
                # 0.10.x
                from IPython.Shell import IPShellEmbed
                ipshell = IPShellEmbed(banner=banner)
                ipshell(global_ns=dict(), local_ns=context)
            except ImportError:
                # 0.12+
                from IPython import embed
                embed(banner1=banner, user_ns=context)
            return
        except ImportError:
            pass
        # Use basic python shell
        import code
        code.interact(banner, local=context)
def get(self, review_id):
    review = db.session().query(model.Review).filter_by(review_id=review_id).one()
    result = review.to_dict(
        include=['project', 'rounds'],
        exclude=['project_id'],
        childargs={
            'rounds': {
                'include': ['changes'],
                'exclude': ['review_id'],
                'childargs': {
                    'changes': {
                        'include': ['merge_base_file', 'branch_tip_file', 'prev_tip_file'],
                        'exclude': ['merge_base_file_id', 'branch_tip_file_id', 'prev_tip_file_id'],
                        'childargs': {
                            'merge_base_file': {'exclude': ['content_hash', 'contents']},
                            'branch_tip_file': {'exclude': ['content_hash', 'contents']},
                            'prev_tip_file': {'exclude': ['content_hash', 'contents']},
                        }
                    }
                }
            }
        })
    if len(review.rounds) > 0:
        result['latest_round'] = review.rounds[-1].to_dict(exclude=['review_id'])
    else:
        result['latest_round'] = None
    return result
def run(self):
    sess = None
    try:
        self._log(
            "Starting to parse the file with '" + self.contract_name + "'.")
        sess = db.session()
        batch = Batch.get_by_id(sess, self.batch_id)
        raw_bills = self.parser.make_raw_bills()
        self._log(
            "Successfully parsed the file, and now I'm starting to "
            "insert the raw bills.")
        for self.bill_num, raw_bill in enumerate(raw_bills):
            try:
                db.set_read_write(sess)
                bill_type = BillType.get_by_code(
                    sess, raw_bill['bill_type_code'])
                bill = batch.insert_bill(
                    sess, raw_bill['account'], raw_bill['reference'],
                    raw_bill['issue_date'], raw_bill['start_date'],
                    raw_bill['finish_date'], raw_bill['kwh'], raw_bill['net'],
                    raw_bill['vat'], raw_bill['gross'], bill_type,
                    raw_bill['breakdown'])
                sess.flush()
                for raw_read in raw_bill['reads']:
                    tpr_code = raw_read['tpr_code']
                    if tpr_code is None:
                        tpr = None
                    else:
                        tpr = Tpr.get_by_code(sess, tpr_code)
                    prev_type = ReadType.get_by_code(
                        sess, raw_read['prev_type_code'])
                    pres_type = ReadType.get_by_code(
                        sess, raw_read['pres_type_code'])
                    read = bill.insert_read(
                        sess, tpr, raw_read['coefficient'], raw_read['units'],
                        raw_read['msn'], raw_read['mpan'],
                        raw_read['prev_date'], raw_read['prev_value'],
                        prev_type, raw_read['pres_date'],
                        raw_read['pres_value'], pres_type)
                    sess.expunge(read)
                sess.commit()
                self.successful_bills.append(raw_bill)
                sess.expunge(bill)
            except UserException as e:
                sess.rollback()
                raw_bill['error'] = str(e)
                self.failed_bills.append(raw_bill)
        if len(self.failed_bills) == 0:
            self._log(
                "All the bills have been successfully loaded and attached "
                "to the batch.")
        else:
            self._log(
                "The import has finished, but " +
                str(len(self.failed_bills)) + " bills failed to load.")
    finally:
        # Cleanup for the outer try block is not included in this snippet;
        # closing the session here is an assumption.
        if sess is not None:
            sess.close()
def add_label():
    s = session()
    news_id = request.query.id
    news_label = request.query.label
    curr_news = s.query(News).filter(News.id == news_id)
    curr_news.update({'label': news_label})
    s.commit()
    redirect("/news")
def fetch_valid_one(user_id):
    s = session()
    user = s.query(User).filter(User.valuable == True,
                                User.is_working == False,
                                User.freeze_status == False,
                                User.user_id == user_id).first()
    s.close()
    return user
async def db_session_middleware(request: Request, call_next):
    response = Response("Internal server error", status_code=500)
    try:
        request.state.db = session()
        response = await call_next(request)
    finally:
        request.state.db.close()
    return response
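# A minimal sketch of how db_session_middleware above is typically registered,
# assuming FastAPI/Starlette and the same `session` factory; the app and route
# below are illustrative and not part of the original project.
from fastapi import FastAPI, Request

app = FastAPI()
app.middleware("http")(db_session_middleware)

@app.get("/health")
async def health(request: Request):
    # The per-request session opened by the middleware is reachable here and
    # is closed again once the response has been produced.
    return {"db_session_open": request.state.db is not None}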
def fetch_waiting_tasks(app_type):
    s = session()
    tasks = s.query(Task).filter(Task.status == TASK_STATUS_WAIT,
                                 User.app_type == app_type,
                                 Task.fire_time < now()).all()
    s.close()
    return tasks
def _add(self, user_id):
    if user_id not in self._nodes:
        from user.models import User
        with db.session():
            self._nodes[user_id] = Node(self, User.get_or_create(user_id))
    else:
        if not self._nodes[user_id].online:
            self._nodes[user_id].ping()
def ad_update(self, arguments, warnings, errors):
    if arguments["id"] == "new":
        ad = db.Ad()
    else:
        ad = db.Ad.by_id(arguments["id"])
    if len(arguments) > 1:
        if arguments["name"].strip() == "":
            errors["name"].append(u"Es necesario asignar un nombre al anuncio")
        if len(errors) == 0:
            helpers.get(db.Ad).to_record(arguments, ad)
            db.session().add(ad)
            db.session().commit()
            arguments = helpers.get(db.Ad).to_dictionary(ad)
    else:
        arguments = helpers.get(db.Ad).to_dictionary(ad)
    return arguments
def add_label():
    s = session()
    label = request.query.label
    row_id = request.query.id
    row = s.query(News).filter(News.id == row_id).one()
    row.label = label
    s.commit()
    redirect("/news")
def connectionLost(self, reason):
    self.logger.info(reason.getErrorMessage())
    self.factory.connections.remove(self)
    if self.object is not None:
        with session() as s:
            self.object.connected = False
            self.object.connection = None
            s.add(self.object)
def news_list():
    s = session()
    rows = s.query(News).filter().all()[::-1]
    if len(rows) == 0:
        # The site behaves oddly when more pages are requested, so for now
        # only 5 pages are fetched.
        news = get_news("https://news.ycombinator.com/newest", 5)
        for neew in news[::-1]:
            s = session()
            add_news = News(title=neew['title'],
                            author=neew['author'],
                            url=neew['url'],
                            comments=neew['comments'],
                            points=neew['points'])
            s.add(add_news)
            s.commit()
        rows = s.query(News).filter(News.label == None).all()[::-1]
    return template('news_template', rows=rows)
def create_model():
    s = session()
    labeled_news = s.query(News).filter(News.label != None).all()
    x_train = [clean(news.title) for news in labeled_news]
    y_train = [news.label for news in labeled_news]
    classifier = NaiveBayesClassifier(0.05)
    [labels, model] = classifier.fit(x_train, y_train)
    return template("news_model", labels=labels, model=model)
def add_label():
    this_label = request.query.label
    this_id = request.query.id
    s = session()
    changing_news = s.query(News).get(this_id)
    changing_news.label = this_label
    s.commit()
    redirect("/news")
def add_label():
    s = session()
    # The news id matches the id of the button that was clicked.
    news = s.query(News).filter(News.id == request.query.id).one()
    # Assign the selected label.
    news.label = request.query.label
    s.commit()
    redirect("/news")
def testLastEvent(self):
    event = db.Event.add('oy', db.Events.start, datetime.datetime.now(), 'sample task')
    s = db.session()
    task = db.Task.get('sample task', s)
    oy = db.User.get('oy', s)
    le = oy.lastEvent()
    self.assert_(le)
    self.assertEqual(str(le), str(event))
def add_label():
    label = request.query['label']
    id = request.query['id']
    s = session()
    needed_new = s.query(News).filter(News.id == id).first()
    needed_new.label = label
    s.commit()
    redirect("/news")
def add_label():
    ids = request.query.get("id")
    labels = request.query.get("label")
    s = session()
    for item in s.query(News).filter(News.id == ids).all():
        item.label = labels
    s.commit()
    redirect("/news")
def fetch_valid_users(app_type):
    s = session()
    users = s.query(User).filter(User.valuable == True,
                                 User.is_working == False,
                                 User.freeze_status == False,
                                 User.app_type == app_type).all()
    s.close()
    return users
def add_label():
    s = session()
    label = request.query.label
    news_id = request.query.id
    news = s.query(News).filter(News.id == news_id).one()
    news.label = label
    s.commit()
    redirect("/news")
def update_task_firetime(self, firetime):
    self.fire_time = firetime
    self.update_time = now()
    s = session()
    s.query(Task).filter(Task.id == self.id).update(
        {Task.fire_time: firetime})
    s.commit()
    s.close()
def add_label():
    query = request.query.decode()
    id = int(query["id"])
    label = query["label"]
    s = session()
    s.query(News).filter(News.id == id).update({News.label: label})
    s.commit()
    redirect("/news")
def find_in_catalog(self, catalog_name, term):
    result = db.session().query(db.CatalogEntry)\
        .filter(db.CatalogEntry.catalog_name == catalog_name)\
        .filter(db.CatalogEntry.value.like("%%%s%%" % term))\
        .all()
    return [{"id": v.id, "value": v.value} for v in result]
def add_label():
    news_id = request.query.id
    label = request.query.label
    s = session()
    current = s.query(News).filter(News.id == news_id).one()
    current.label = label
    s.commit()
    redirect("/news")
def content():
    sess = None
    try:
        sess = db.session()
        contract = Contract.get_mop_by_id(sess, contract_id)
        forecast_date = computer.forecast_date()
        yield 'Import MPAN Core, Export MPAN Core, Start Date, Finish Date'
        bill_titles = computer.contract_func(
            caches, contract, 'virtual_bill_titles', None)()
        for title in bill_titles:
            yield ',' + title
        yield '\n'
        for era in sess.query(Era).filter(
                or_(Era.finish_date == null(), Era.finish_date >= start_date),
                Era.start_date <= finish_date,
                Era.mop_contract_id == contract.id).order_by(Era.supply_id):
            import_mpan_core = era.imp_mpan_core
            if import_mpan_core is None:
                import_mpan_core_str = ''
            else:
                mpan_core = import_mpan_core
                is_import = True
                import_mpan_core_str = mpan_core
            export_mpan_core = era.exp_mpan_core
            if export_mpan_core is None:
                export_mpan_core_str = ''
            else:
                is_import = False
                mpan_core = export_mpan_core
                export_mpan_core_str = mpan_core
            yield import_mpan_core_str + ',' + export_mpan_core_str + ',' + \
                hh_format(start_date) + ',' + hh_format(finish_date) + ','
            supply_source = computer.SupplySource(
                sess, start_date, finish_date, forecast_date, era, is_import,
                None, caches)
            computer.contract_func(
                caches, contract, 'virtual_bill', None)(supply_source)
            bill = supply_source.mop_bill
            for title in bill_titles:
                if title in bill:
                    yield '"' + str(bill[title]) + '",'
                    del bill[title]
                else:
                    yield ','
            for k in sorted(bill.keys()):
                yield ',"' + k + '","' + str(bill[k]) + '"'
            yield '\n'
    except:
        yield traceback.format_exc()
    finally:
        # Close the session if one was opened.
        if sess is not None:
            sess.close()
def add_label():
    s = session()
    id = request.query.id
    label = request.query.label
    news = s.query(News).get(id)
    news.label = label
    s.commit()
    redirect("/news")
def add_label():
    s = session()
    label = request.query.label
    row_id = request.query.id
    row = s.query(News).filter(News.id == row_id).first()
    row.label = label
    s.commit()
    classifier.fit([clean(row.title).lower()], [label])
    redirect("/news")
def add(**kwargs):
    # Create a session object:
    s = session()
    # Add the new user to the session:
    s.add(User(**kwargs))
    # Commit, i.e. persist to the database:
    s.commit()
    # Close the session:
    s.close()
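# Example call for add() above; the keyword arguments are illustrative and
# assume the User model has `name` and `email` columns, which is not shown in
# the original snippet.
add(name="Alice", email="alice@example.com")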
def create_default_lists(user_id=None):
    with db.session():
        user_id = user_id or settings.USER_ID
        user = User.get_or_create(user_id)
        user.update_name()
        for list in settings.config['lists']:
            l = List.get(user_id, list['title'])
            if not l:
                l = List.create(user_id, list['title'], list.get('query'))
def add_label():
    s = session()
    label, _id = request.query["label"], request.query["id"]
    row = s.query(News).filter(News.id == _id).all()[0]
    row.label = label
    s.add(row)
    s.commit()
    redirect("/news")
def add_label():
    lbl = request.query["label"]
    id1 = request.query["id"]
    s = session()
    news = s.query(News).get(id1)
    news.label = lbl
    s.add(news)
    s.commit()
    redirect("/news")
def root():
    lookup = TemplateLookup(directories=['templates'], strict_undefined=True)
    template = lookup.get_template('main.html')
    s = db.session()
    todos = s.query(db.Todo).all()
    parsedTodos = []
    for todo in todos:
        parsedTodos.append(todo.toDict())
    return template.render(todo=parsedTodos)
async def incepe_rand(ctx):
    s = session()
    questStartedId = Quest.startRandom(s)
    print(questStartedId)
    await ctx.send("Questul urmator a fost inceput, mult noroc coita :sunglasses:")
    questString = getQuestStringByID(questStartedId)
    await ctx.send(questString)
    s.close()
def add_label():
    news_id = request.query['id']
    label = request.query['label']
    s = session()
    news_item = s.query(News).filter(News.id == news_id).all()[0]
    news_item.label = label
    s.add(news_item)
    s.commit()
    redirect("/news")