# Fix: `Flask` and `render_template` were used but never imported, so this
# module raised NameError at import time.
from flask import Flask, render_template
from flask.ext.assets import Environment, Bundle
# NOTE(review): flask.ext.* namespace imports were removed in Flask 1.0;
# modern installs need `flask_assets` / `flask_sqlalchemy` instead — left
# as-is to avoid changing the dependency surface.
from flask.ext.sqlalchemy import before_models_committed, event
from sqlalchemy.dialects.postgres import JSONB
from sqlalchemy.sql.expression import cast

app = Flask(__name__)
app.config.from_object('config')

# Imported after app creation so cms.models can rely on app config.
from cms.models import db, Document, on_before_models_committed, on_before_insert

db.init_app(app)
with app.app_context():
    db.create_all()

# Wire model lifecycle hooks before the first request.
event.listen(Document, 'before_insert', on_before_insert)
before_models_committed.connect(on_before_models_committed, app)

# Asset pipeline: compile SCSS then minify.
assets = Environment(app)
style = Bundle('../styles/site.scss', filters='scss,cssmin', output='styles/site.css')
assets.register('style', style)


@app.route('/')
def home():
    """Render the home page listing documents whose JSONB `data->type` is 'thing'.

    The query is passed lazily to the template; it executes on iteration.
    """
    things = Document.query.filter(Document.data['type'].astext == 'thing')
    return render_template('home.html', things=things)
# Fix: `Flask` and `render_template` were used but never imported, so this
# module raised NameError at import time.
from flask import Flask, render_template
from flask.ext.assets import Environment, Bundle
# NOTE(review): flask.ext.* namespace imports were removed in Flask 1.0;
# modern installs need `flask_assets` / `flask_sqlalchemy` instead — left
# as-is to avoid changing the dependency surface.
from flask.ext.sqlalchemy import before_models_committed, event
from sqlalchemy.dialects.postgres import JSONB
from sqlalchemy.sql.expression import cast

app = Flask(__name__)
app.config.from_object('config')

# Imported after app creation so cms.models can rely on app config.
from cms.models import db, Document, on_before_models_committed, on_before_insert

db.init_app(app)
with app.app_context():
    db.create_all()

# Wire model lifecycle hooks before the first request.
event.listen(Document, 'before_insert', on_before_insert)
before_models_committed.connect(on_before_models_committed, app)

# Asset pipeline: compile SCSS then minify.
assets = Environment(app)
style = Bundle('../styles/site.scss', filters='scss,cssmin', output='styles/site.css')
assets.register('style', style)


@app.route('/')
def home():
    """Render the home page listing documents whose JSONB `data->type` is 'thing'.

    The query is passed lazily to the template; it executes on iteration.
    """
    things = Document.query.filter(Document.data['type'].astext == 'thing')
    return render_template('home.html', things=things)


if __name__ == '__main__':
    # Development server only: loopback interface, port 3030.
    app.run('127.0.0.1', 3030)
def create_auth_token(self): s = Serializer(SECRET_KEY) return s.dumps({'id': self.id}) @staticmethod def verify_auth_token(token): token = bytes(token, encoding='utf-8') s = Serializer(SECRET_KEY) try: data = s.loads(token) except SignatureExpired as e: return None except BadSignature as e: return None user = User.query.get(data['id']) if user: return True else: return False # 数据库创建时,默认插入默认账户 def insert_default(target, connection, **kw): default_user = User("admin", pwd_context.encrypt("admin")) db.session.add(default_user) db.session.commit() event.listen(User.__table__, 'after_create', insert_default)
def register_for_logging(cls):
    """Attach after_insert/after_update/after_delete audit hooks to *cls*.

    Each hook resolves the owning businessId — directly, or by walking
    foreign-key chains until a table carrying a 'businessId' column is
    found — and records the change in the serversynclog table.

    NOTE(review): Python 2 code (print statements).  Relies on names not
    visible in this chunk: event, inspect, serversynclog, json,
    get_project_Id.  Indentation reconstructed from a collapsed source
    line — verify the else-chain nesting against the original file.
    """

    def biness_fkey_func(res, fkey):
        # Depth-first search along foreign keys for a table that has a
        # 'businessId' column.  `res` accumulates the tables on the current
        # path, `fkey` the foreign keys linking them; both are returned so
        # the caller can replay the chain with SQL lookups.
        keytable = fkey[-1].column.table
        keytable_cols = list(keytable.columns)
        bid_exist = False
        for col in keytable_cols:
            if col.name == 'businessId':
                bid_exist = True
                break
        if bid_exist:
            # Found the table holding businessId: terminate this branch.
            res.append(keytable)
        else:
            if len(list(keytable.foreign_keys)) > 0:
                # Recurse through each outgoing foreign key; r/f snapshot
                # the incoming path so each branch restarts from the same
                # prefix.
                r = res
                # print r
                f = fkey
                # print f
                res = []
                fkey = []
                for newkey in list(keytable.foreign_keys):
                    res.extend(r)
                    fkey.extend(f)
                    res.append(keytable)
                    fkey.append(newkey)
                    # print res
                    # print fkey
                    res, fkey = biness_fkey_func(res, fkey)
                    if not(r == res) and not(f == fkey):
                        # Recursion extended the path: success, stop trying
                        # other keys.
                        break
                    else:
                        # Dead end: drop this branch and try the next key.
                        res.pop()
                        fkey.pop()
            else:
                # Leaf table without businessId: backtrack.
                res.pop()
                fkey.pop()
        return res, fkey

    def after_insert_callback(mapper, connection, target):
        # Resolve businessId for the freshly inserted row, then write a
        # 'create' entry into serversynclog.
        print "Executing after_insert on inserted instance of " + mapper.local_table.name
        print target
        if mapper.local_table.name == 'business':
            binessId = target.id
        else:
            if hasattr(target, 'businessId'):
                binessId = target.businessId
            else:
                # No direct column: walk FK chains starting from this table.
                keys = list(mapper.local_table.foreign_keys)
                if len(keys) > 0:
                    for key in keys:
                        fkey = [key]
                        res = [mapper.local_table]
                        result, frgnkey = biness_fkey_func(res, fkey)
                        if result and frgnkey:
                            # print result
                            # print frgnkey
                            # Follow the chain row by row, resolving each
                            # FK value with a SELECT on the next table.
                            idlist = [target.id]
                            flag = 0
                            for i in range(len(frgnkey)):
                                dfd = frgnkey[i].parent.key
                                aaa = connection.execute(result[i].select().where(result[i].c.id == idlist[i]))
                                aaa = aaa.fetchone()
                                bbb = getattr(aaa, dfd)
                                if bbb:
                                    idlist.append(bbb)
                                else:
                                    # NULL FK breaks the chain.
                                    idlist.append(None)
                                    flag = 1
                                    break
                            if flag == 0:
                                # Chain complete: the last table carries
                                # the businessId.
                                finalrow = connection.execute(result[-1].select().where(result[-1].c.id == idlist[-1]))
                                finalrow = finalrow.fetchone()
                                binessId = getattr(finalrow, 'businessId')
                                break
                            else:
                                binessId = None
                        else:
                            binessId = None
                else:
                    binessId = None
        pids = get_project_Id(mapper, connection, target)
        print pids
        # if tableName == 'project':
        #     projectId = target.id
        # elif hasattr(target,'projectId'):
        #     projectId = target.projectId
        # elif tableName == 'channelsetup':
        #     pass
        #     # reach hardwaresetup and from there to project
        # elif tableName == 'hwchassismap' or tableName == 'hwchaschanmap' or tableName == 'acquisition':
        #     pass
        #     # reach hardwaresetup and from there to project
        # elif tableName == 'ONE OF THE DSP_RELATED_TABLES':
        #     pass
        # else:
        #     tableName == 'ONE OF THE REPORTING RELATED TABLES'
        #     pass
        connection.execute(
            serversynclog.Serversynclog.__table__.insert(),
            {"rowid": target.id, "idtype": "global", "tablename": mapper.local_table.name,
             "type": "create", "bid": binessId, "pIds": json.dumps({'projects': pids})}
        )

    def after_update_callback(mapper, connection, target):
        # Same businessId resolution as after_insert, but only logs when
        # the session reports actual attribute modifications.
        print "Executing after_update on updated instance of " + mapper.local_table.name
        # print inspect(target).unmodified
        # print inspect(target).expired_attributes
        print inspect(target).session.is_modified(target, include_collections=False)
        if inspect(target).session.is_modified(target, include_collections=False):
            if mapper.local_table.name == 'business':
                binessId = target.id
            else:
                if hasattr(target, 'businessId'):
                    binessId = target.businessId
                else:
                    keys = list(mapper.local_table.foreign_keys)
                    if len(keys) > 0:
                        for key in keys:
                            fkey = [key]
                            res = [mapper.local_table]
                            result, frgnkey = biness_fkey_func(res, fkey)
                            if result and frgnkey:
                                idlist = [target.id]
                                flag = 0
                                for i in range(len(frgnkey)):
                                    dfd = frgnkey[i].parent.key
                                    aaa = connection.execute(result[i].select().where(result[i].c.id == idlist[i]))
                                    aaa = aaa.fetchone()
                                    bbb = getattr(aaa, dfd)
                                    if bbb:
                                        idlist.append(bbb)
                                    else:
                                        idlist.append(None)
                                        flag = 1
                                        break
                                if flag == 0:
                                    finalrow = connection.execute(result[-1].select().where(result[-1].c.id == idlist[-1]))
                                    finalrow = finalrow.fetchone()
                                    binessId = getattr(finalrow, 'businessId')
                                    break
                                else:
                                    binessId = None
                            else:
                                binessId = None
                    else:
                        binessId = None
            pids = get_project_Id(mapper, connection, target)
            print pids
            connection.execute(
                serversynclog.Serversynclog.__table__.insert(),
                {"rowid": target.id, "idtype": "global", "tablename": mapper.local_table.name,
                 "type": "update", "bid": binessId, "pIds": json.dumps({'projects': pids})}
            )

    def after_delete_callback(mapper, connection, target):
        # Deleted row is gone from the table, so the FK chain is seeded
        # from the attribute still held on the in-memory target.
        print "Executing after_delete on deleted instance of " + mapper.local_table.name
        if mapper.local_table.name == 'business':
            binessId = target.id
        else:
            if hasattr(target, 'businessId'):
                binessId = target.businessId
                # keys = list(mapper.local_table.foreign_keys)
                # relationItems = mapper.relationships.items()
                # for r in relationItems:
                #     print r
                # relationValues = mapper.relationships.values()
                # for r in relationValues:
                #     print r.target.name
                #     print r.local_columns
                #     print r.local_remote_pairs
                #     print r.table
                # print dir(relationItems[0])
                # print dir(relationValues[0])
                # print keys[0].parent
                # dfd = keys[0].parent.key
                # print dfd
                # biss = getattr(target, dfd)
                # print binessId
                # print biss
                # print biss == binessId
            else:
                keys = list(mapper.local_table.foreign_keys)
                if len(keys) > 0:
                    for key in keys:
                        fkey = [key]
                        # print fkey
                        res = [mapper.local_table]
                        result, frgnkey = biness_fkey_func(res, fkey)
                        if result and frgnkey:
                            idlist = []
                            # print result
                            # print frgnkey
                            dfd = fkey[0].parent.key
                            bbb = getattr(target, dfd)
                            if bbb:
                                # First hop comes from the target itself;
                                # drop it from the chain before replaying.
                                idlist.append(bbb)
                                result.pop(0)
                                frgnkey.pop(0)
                                flag = 0
                                # idlist = [target.id]
                                for i in range(len(frgnkey)):
                                    dfd = frgnkey[i].parent.key
                                    aaa = connection.execute(result[i].select().where(result[i].c.id == idlist[i]))
                                    # print dir(aaa)
                                    aaa = aaa.fetchone()
                                    # print aaa
                                    bbb = getattr(aaa, dfd)
                                    if bbb:
                                        idlist.append(bbb)
                                    else:
                                        idlist.append(None)
                                        flag = 1
                                        break
                                if flag == 0:
                                    finalrow = connection.execute(result[-1].select().where(result[-1].c.id == idlist[-1]))
                                    finalrow = finalrow.fetchone()
                                    binessId = getattr(finalrow, 'businessId')
                                    break
                                else:
                                    binessId = None
                            else:
                                binessId = None
                        else:
                            binessId = None
                else:
                    binessId = None
        pids = get_project_Id(mapper, connection, target)
        print pids
        connection.execute(
            serversynclog.Serversynclog.__table__.insert(),
            {"rowid": target.id, "idtype": "global", "tablename": mapper.local_table.name,
             "type": "delete", "bid": binessId, "pIds": json.dumps({"projects": pids})}
        )

    event.listen(cls, 'after_insert', after_insert_callback)
    event.listen(cls, 'after_update', after_update_callback)
    event.listen(cls, 'after_delete', after_delete_callback)
class ThreatLevel(db.Model):
    """Lookup table of threat levels shown on event/attribute forms."""
    __tablename__ = 'threat_levels'

    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    name = db.Column(db.String)
    description = db.Column(db.String)
    form_description = db.Column(db.String)


def update_distribution(mapper, connection, target):
    """Clamp incoming distribution before insert.

    0: 'Your organisation only',
    1: 'This community only',
    2: 'Connected communities',
    3: 'All communities'

    'Connected communities' (2) is downgraded to 'This community only' (1).
    """
    if target.distribution == 2:
        target.distribution = 1


def save_attachment(mapper, connection, target):
    """Persist a base64 attachment to ATTACHMENTS_PATH_IN/<event_id>/<id>.

    Fix: the old exists()/makedirs() pair raced with concurrent inserts
    creating the same event directory; makedirs is now attempted directly
    and an already-existing directory is tolerated.  The mode literal is
    written as 0o755 so the module also parses on Python 3.
    """
    if target.attachment is not None:
        npath = os.path.join(ATTACHMENTS_PATH_IN, str(target.event_id))
        try:
            os.makedirs(npath, 0o755)
        except OSError:
            # Only swallow the error when the directory already exists;
            # re-raise real failures (permissions, disk, ...).
            if not os.path.isdir(npath):
                raise
        with open(os.path.join(npath, str(target.id)), 'wb') as f:
            f.write(b64decode(target.attachment))


event.listen(Attribute, 'before_insert', update_distribution)
event.listen(Event, 'before_insert', update_distribution)
event.listen(Attribute, 'after_insert', save_attachment)
        # NOTE(review): this span is the tail of a method whose `def` line is
        # outside the visible chunk (presumably the Post body-changed handler,
        # given the event.listen below) — indentation reconstructed; confirm
        # against the original file.
        allow_tags = [
            'a', 'abbr', 'acronym', 'b', 'blockquote', 'code',
            'em', 'i', 'li', 'ol', 'pre', 'strong', 'ul',
            'h1', 'h2', 'h3', 'p'
        ]
        # Render markdown to HTML, strip any tag not in the allow-list,
        # then auto-link bare URLs.
        target.body_html = bleach.linkify(bleach.clean(
            markdown(value, output_format='html'),
            tags=allow_tags, strip=True))

    @staticmethod
    def generate_fake(count=100):
        """Seed the database with *count* fake posts by random existing users.

        Development/test helper; imports forgery_py lazily so production
        deployments do not need it installed.
        """
        import forgery_py as forgery
        from random import randint
        user_count = User.query.count()
        for i in range(count):
            # Pick a random existing user as the author.
            u = User.query.offset(randint(0, user_count-1)).first()
            post = Post(body=forgery.lorem_ipsum.sentences(randint(1, 3)),
                        timestamp=forgery.date.date(True),
                        author=u)
            db.session.add(post)
            db.session.commit()


# Re-render the HTML cache whenever Post.body is assigned.
event.listen(Post.body, 'set', Post.on_body_changed)


@login_manager.user_loader
def load_user(user_id):
    # Flask-Login callback: resolve the session's user id to a User row.
    return User.query.get(int(user_id))
def __declare_last__(cls):
    """Declarative hook run after mapper configuration.

    Wires the class's own timestamp handlers so create/update times are
    maintained automatically on flush.
    """
    registrations = (
        ('before_insert', cls.create_time),
        ('before_update', cls.update_time),
    )
    for event_name, handler in registrations:
        event.listen(cls, event_name, handler)