def db():
    """Yield a DB handle inside a transaction that is always rolled back.

    Generator-fixture style: the consumer receives a live connection, and the
    transaction is undone afterwards so no changes persist.
    """
    db = DB()
    # NOTE(review): 'begin_transation' is spelled inconsistently with
    # 'rollback_transaction' below — confirm the DB class really exposes this
    # (likely misspelled) method name; kept as-is since the DB API is not
    # visible here.
    db.begin_transation()
    try:
        yield db
    finally:
        # Fix: previously the rollback never ran when the code consuming the
        # fixture raised, leaking an open transaction.
        db.rollback_transaction()
def commentdelete(self):
    """Delete a comment by id (POST only).

    Reads ``comment_id`` from the POSTed form and removes the matching row,
    then returns a JSON success payload. Non-POST requests fall through and
    return None, as in the original implementation.
    """
    if request.method == 'POST':
        # Fix: cast to int so a crafted comment_id cannot inject SQL;
        # legitimate ids are integers, so behavior is unchanged for valid
        # input. NOTE(review): DB.execute takes raw SQL strings — switch to
        # parameterized queries if the DB wrapper supports them.
        comment_id = int(request.form['comment_id'])
        dbconn = DB()
        queries = ["DELETE FROM comment WHERE id= '{0}';".format(comment_id)]
        dbconn.execute(queries)
        return jsonify([{ 'status':'success' }])
def commentadd(self):
    """Insert a new comment (POST only).

    Stamps the comment with the current server time and returns a JSON
    payload containing that timestamp and the DB result (comment id).
    Non-POST requests fall through and return None, as before.
    """
    if request.method == 'POST':
        ctime = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        # Fix: cast the numeric form ids to int so they cannot be used for
        # SQL injection; valid ids behave identically.
        user_id = int(request.form['user_id'])
        news_id = int(request.form['news_id'])
        # NOTE(review): the free-text comment is still interpolated into the
        # SQL string — move to parameterized queries if DB.execute supports
        # placeholders.
        dbconn = DB()
        queries = ["INSERT INTO comment(user_id, news_id, comment, comment_date) VALUES('{0}','{1}', '{2}', '{3}');".format(
            user_id,
            news_id,
            request.form['comment'],
            ctime)]
        result = dbconn.execute(queries)
        return jsonify([{ 'date':ctime, 'comment_id':result }])
def newsAdd(self):
    """Handle the add-news form: insert on a valid POST, else re-render.

    On POST with both title and content filled (per self.mvalidate), inserts
    the news row, flashes a message, and redirects to the 'news' view.
    Otherwise renders the form again with self.error and the submitted values.
    """
    if request.method == 'POST':
        if self.mvalidate(request.form) is True:
            dbconn = DB()
            # NOTE(review): title/content are interpolated straight into the
            # SQL string — SQL injection risk; use parameterized queries if
            # the DB wrapper supports them. The image column is always ''.
            queries = ["INSERT INTO news(title, content, date, image) VALUES('{0}','{1}', current_timestamp, '{2}');".format(
                request.form['title'],
                request.form['content'],
                '',
            )]
            dbconn.execute(queries);
            flash('News succesfully added.')
            return redirect(url_for('news'))
    return render_template('newsAdd.html', error=self.error, params=request.form)
def __init__(self, app_dir, plex_dir, stalker_db_host, stalker_db_user, stalker_db_pass):
    """Open one shared DB connection and build every helper around it."""
    shared_db = DB(app_dir, plex_dir, stalker_db_host, stalker_db_user, stalker_db_pass)
    self.db = shared_db
    # Media / video / season / episode helpers all reuse the same connection.
    self.ifcm = IfcMedia(shared_db)
    self.stvd = StalkerVideo(shared_db)
    self.ifcs = IfcSeasons(shared_db)
    self.stsn = StalkerSeasons(shared_db)
    self.ifep = IfcEpisodes(shared_db)
class News(object):
    """News listing plus comment add endpoints backed by raw SQL strings."""

    # Shared DB connection, set in __init__.
    dbconn = None
    # HTML error list accumulated by mvalidate().
    error = ''

    def __init__(self):
        self.dbconn = DB()

    def getnews(self):
        # All news rows, newest first.
        queries = ["SELECT * FROM news ORDER BY DATE DESC;"]
        result = self.dbconn.execute(queries);
        return result;

    def getnewsmore(self, news_id):
        # Rows for a single news id.
        # NOTE(review): news_id is interpolated into the SQL string — SQL
        # injection if the caller ever passes untrusted input; parameterize
        # if the DB wrapper supports it.
        queries = ["SELECT * FROM news WHERE id = {0} ORDER BY DATE DESC;".format(news_id)]
        result = self.dbconn.execute(queries);
        return result;

    def getnewscomment(self, news_id):
        # Comments for one news item joined with the commenting user's name.
        # NOTE(review): same interpolation/injection concern as getnewsmore.
        queries = ["SELECT a.id, b.firstname, b.lastname, a.comment, a.comment_date "
                   "FROM comment a INNER JOIN user b ON b.id=a.user_id WHERE a.news_id={0} ORDER BY DATE DESC;".format(news_id)]
        result = self.dbconn.execute(queries);
        return result;

    #------------------------------ news add ---------------------------------------------------------------------------
    def newsAdd(self):
        # POST + valid form: insert, flash, redirect; otherwise re-render
        # the form with self.error and the submitted values.
        if request.method == 'POST':
            if self.mvalidate(request.form) is True:
                dbconn = DB()
                # NOTE(review): title/content come straight from the form —
                # SQL injection risk. The image column is always ''.
                queries = ["INSERT INTO news(title, content, date, image) VALUES('{0}','{1}', current_timestamp, '{2}');".format(
                    request.form['title'],
                    request.form['content'],
                    '',
                )]
                dbconn.execute(queries);
                flash('News succesfully added.')
                return redirect(url_for('news'))
        return render_template('newsAdd.html', error=self.error, params=request.form)

    def mvalidate(self, form):
        # True when title and content are both non-empty; otherwise stores
        # an HTML error list on self.error and returns False.
        self.error = ''
        if form['title'] == "":
            self.error = '- title is not filled<br>'
        if form['content'] == "":
            self.error += '- content is not filled<br>'
        if self.error != "":
            return False
        else:
            return True

    #------------------------------ comment add ------------------------------------------------------------------------
    def commentadd(self):
        # POST: insert a comment stamped with the current server time and
        # return {date, comment_id} as JSON.
        if request.method == 'POST':
            ctime = datetime.now().strftime("%Y-%m-%d %H:%M:%S");
            dbconn = DB()
            # NOTE(review): all four values are interpolated into the SQL
            # string — injection risk for the free-text comment especially.
            queries = ["INSERT INTO comment(user_id, news_id, comment, comment_date) VALUES('{0}','{1}', '{2}', '{3}');".format(
                request.form['user_id'],
                request.form['news_id'],
                request.form['comment'],
                ctime)]
            result = dbconn.execute(queries);
            return jsonify([{ 'date':ctime, 'comment_id':result }])
#------------------------------ comment delete ---------------------------------------------------------------------
def commentdelete(self):
    """Delete a comment by id (POST only); returns a JSON success payload."""
    if request.method == 'POST':
        # Fix: cast to int so a crafted comment_id cannot inject SQL;
        # legitimate ids are integers, so valid input behaves identically.
        # NOTE(review): prefer parameterized queries if DB.execute supports
        # placeholders.
        comment_id = int(request.form['comment_id'])
        dbconn = DB()
        queries = ["DELETE FROM comment WHERE id= '{0}';".format(comment_id)]
        dbconn.execute(queries)
        return jsonify([{ 'status':'success' }])
import json
from flask import Flask
from libs.groups import Grid
from libs.db import DB

# Sample grid of 0/1 cells.
# NOTE(review): count_groups() below constructs Grid(()) and never uses
# DATA — looks like it should be Grid(DATA); confirm intent.
DATA = (
    [1, 1, 0, 1, 0, 0],
    [1, 1, 0, 1, 0, 0],
    [0, 1, 0, 0, 0, 0],
    [1, 1, 0, 0, 0, 1],
)

# NOTE(review): hard-coded DB host/credentials — move to configuration.
db = DB('192.168.50.178', 3306, 'demo', 'root', 'change_me')
app = Flask(__name__)


@app.route('/')
def root():
    return 'Hi!'


@app.route('/count', methods=['GET'])
def count_groups():
    # Counts groups in an empty grid — see the NOTE on DATA above.
    grid = Grid(())
    return str(grid.count_groups())


@app.route('/count/<key>', methods=['GET'])
def lookup(key):
    # NOTE(review): no visible return statement — this view appears
    # truncated or unfinished; the result is fetched but never sent back.
    result = db.get_result(key)
# -*-coding:UTF-8-*-
import sys
import conf
from libs.db import DB

if __name__ == "__main__" :
    # Python 2: re-expose setdefaultencoding so escaped strings are UTF-8.
    reload(sys)
    sys.setdefaultencoding('utf-8')
    sql = DB(conf.mysql.host, conf.mysql.username, conf.mysql.password,
             conf.mysql.db, conf.mysql.port, conf.mysql.charset)
    # Fix: the original opened 'dict/spot' twice (once as the loop iterable,
    # once via f.readline()) and never closed either handle. A single
    # context-managed handle reads exactly the same tab-separated lines.
    with open('dict/spot', 'r') as f:
        for line in f:
            (title, address, hot) = line.split("\t")
            if address == '-':
                address = ''
            sql.query("INSERT INTO spot (title, address, hot) VALUES('"+sql.escape(title)+"', '"+sql.escape(address)+"', '"+sql.escape(hot)+"')")
# -*-coding:UTF-8-*- import sys import conf from libs.db import DB from whoosh.index import create_in from whoosh.fields import * from whoosh.qparser import QueryParser from jieba.analyse import ChineseAnalyzer if __name__ == "__main__": reload(sys) sys.setdefaultencoding('utf-8') sql = DB(conf.mysql.host, conf.mysql.username, conf.mysql.password, conf.mysql.db, conf.mysql.port, conf.mysql.charset) all_data = sql.select("SELECT * FROM spot") analyzer = ChineseAnalyzer() schema = Schema(title=TEXT(stored=True, analyzer=analyzer), path=ID(stored=True), hot=NUMERIC(sortable=True)) ix = create_in("index", schema) writer = ix.writer() for data in all_data: writer.add_document( title=data["title"].strip() + u"\t" + data["address"].strip(), path=unicode(data["id"]),
argv.get("--confs", default="config.json"), encoding="utf-8" ) ) logger = Logger( fp=open(argv.get("--logfile", default="xpostrainlog.md"), mode="a+"), stream=sys.stdout ) logger.output("Loading...") from libs.db import DB # noqa E402 db = DB( host=argv.get("--dbhost", default="atlas"), dbname="syntextua" ) if argv.has("--tagparser"): argv.bundle["--tagparser"] = predef.inited(argv.get("--tagparser")) trainer = predef.inited( argv.get("--trainer"), db=db, settings=argv.getdict(), logger=logger ) logger.output("loaded.")
logger.output("Loading...") from libs.db import DB # noqa E402 predef = Predefinator( fp=open( argv.get("--confs", default="config.json"), encoding="utf-8" ) ) reader = predef.inited("ConlluReader") db = DB( host=argv.get("--dbhost", default="atlas"), dbname="syntextua" ) ctxt = predef.inited( "ContextualProcessorTrainer", db=db, logger=logger, recognizer=predef.inited( "MorphologyRecognizer", collection=db.cli.get_collection ) ) ctxt.train( limit=int(argv.get("--limit", default=0)), offset=int(argv.get("--offset", default=0))
import sys
import conf
from libs.db import DB
from whoosh.index import create_in
from whoosh.fields import *
from whoosh.qparser import QueryParser
from jieba.analyse import ChineseAnalyzer

if __name__ == "__main__" :
    # Python 2: re-expose setdefaultencoding so DB rows decode as UTF-8.
    reload(sys)
    sys.setdefaultencoding('utf-8')
    sql = DB(conf.mysql.host, conf.mysql.username, conf.mysql.password, conf.mysql.db, conf.mysql.port, conf.mysql.charset)
    all_data = sql.select("SELECT * FROM spot")
    # Chinese-aware tokenizer (jieba) for the title field.
    analyzer = ChineseAnalyzer()
    schema = Schema(title=TEXT(stored=True, analyzer=analyzer), path=ID(stored=True), hot=NUMERIC(sortable=True))
    # Build the whoosh index under ./index from every spot row.
    ix = create_in("index", schema)
    writer = ix.writer()
    for data in all_data:
        # Index title and address together; rank by hot with each comment
        # weighted 10x.
        writer.add_document(title=data["title"].strip() + u"\t" + data["address"].strip(), path=unicode(data["id"]), hot=(int(data["hot"])+int(data["comment_num"])*10))
    writer.commit()
# -*-coding:UTF-8-*-
import sys
import conf
from libs.db import DB

if __name__ == "__main__":
    # Python 2: re-expose setdefaultencoding so escaped strings are UTF-8.
    reload(sys)
    sys.setdefaultencoding('utf-8')
    sql = DB(conf.mysql.host, conf.mysql.username, conf.mysql.password,
             conf.mysql.db, conf.mysql.port, conf.mysql.charset)
    # Fix: the original opened 'dict/spot' twice (once as the loop iterable,
    # once via f.readline()) and never closed either handle. A single
    # context-managed handle reads exactly the same tab-separated lines.
    with open('dict/spot', 'r') as f:
        for line in f:
            (title, address, hot) = line.split("\t")
            if address == '-':
                address = ''
            sql.query("INSERT INTO spot (title, address, hot) VALUES('" +
                      sql.escape(title) + "', '" + sql.escape(address) +
                      "', '" + sql.escape(hot) + "')")
) raise SystemExit predef = Predefinator( fp=open(argv.get("--confs", default="config.json"), encoding="utf-8")) logger = Logger(fp=open(argv.get("--logfile", default="amalog.md"), mode="a+", encoding="utf-8"), stream=sys.stdout) logger.output("Loading...") from libs.db import DB # noqa E402 db = DB(host=argv.get("--dbhost", default="atlas")) analyzer = predef.inited( "XPOSRecognitionAnalyzer", limit=int(argv.get("--limit", default=9e999)), recognizer=predef.inited( "MorphologyRecognizer", collection=lambda name: db.cli.get_collection(name))) logger.write(f"Connected to {analyzer.recognizer.collection.name}\n") generator = analyzer.init() logger.output( "Loaded succesfully.\n" "Here you'll see the analyzing progress. Numbers in the brackets counts "
# -*- coding: utf-8 -*- from peewee import Model, PrimaryKeyField, CharField, TimestampField, IntegerField, DateTimeField from libs.db import DB db = DB().postgresql class Weather(Model): id = PrimaryKeyField() location_code = IntegerField() location = CharField(max_length=50) timestamp = DateTimeField() high = IntegerField() low = IntegerField() precip = CharField(max_length=10) text_day = CharField(max_length=10) text_night = CharField(max_length=10) wind_direction = CharField(max_length=10) wind_direction_degree = CharField(max_length=10) wind_scale = CharField(max_length=10) wind_speed = CharField(max_length=10) @staticmethod def upsert_(location_code, location, timestamp, high, low, precip, text_day, text_night, wind_direction, wind_direction_degree, wind_scale, wind_speed): sql = """ insert into weather (location_code, location, timestamp, high, low, precip, text_day,
def __init__(self):
    """Create the database connection stored on self.dbconn."""
    self.dbconn = DB()
def gettopnews(self):
    """Return the four most recent news rows."""
    connection = DB()
    top_query = "SELECT * FROM news ORDER BY DATE DESC limit 4;"
    return connection.execute([top_query])