def get(self, id):
    """Return (settings, 200) for guild *id*, or ('Guild not found', 404).

    Fix: the original queried the DB twice and bound the result to the
    name `set`, shadowing the builtin.
    """
    settings = db().get_server_settings(str(id))
    if not settings:
        return 'Guild not found', 404
    return settings, 200
def __add_paper_lengths(self):
    """Load per-paper page lengths from all_lengths.json and write them
    into papers.length, keyed by arxiv_id."""
    import json
    import re
    print("Adding paper lengths...")
    path = os.path.join(self.data_dir, 'keywords-backend', 'all_lengths.json')
    with open(path, 'r') as fh:  # fix: file was opened without being closed
        paper_lengths = json.load(fh)
    c = db().cursor()
    pattern = re.compile(r"\d")  # raw string: avoid invalid-escape warning
    for paper_id, length in paper_lengths.items():
        # Restore '/'. e.g. hep-th34523 -> hep-th/34523
        pos = pattern.search(paper_id).start()
        if pos > 0:
            paper_id = paper_id[:pos] + '/' + paper_id[pos:]
        print(paper_id, length)
        c.execute(
            "UPDATE papers SET length=%(length)s "
            "WHERE arxiv_id = %(arxiv_id)s",
            {'length': length, 'arxiv_id': paper_id})
    print("Committing")
    db().commit()
def put():
    """Update the global VIP setting from the request arguments."""
    parser = reqparse.RequestParser()
    parser.add_argument('VIP')
    args = parser.parse_args()
    # BUG FIX: the original read `db().get_glob_settings` without calling
    # it, so it bound the method object instead of the settings dict and
    # the subscript assignment below would fail.
    settings = db().get_glob_settings()
    settings["VIP"] = args['VIP']
    db().set_glob_settings(settings)
def test_db_persistence():
    '''
    This test is a simple test of the DB, not important since the DB code
    and this test will need to get rewritten.
    '''
    import tube, db
    from data import tension, leak

    tubes = db.db()
    dbman = db.db_manager()
    dbman.wipe('confirm')

    # Two tube objects sharing one id should merge into a single record.
    tube1 = tube.Tube()
    tube2 = tube.Tube()
    tube1.m_tube_id = "MSU0000001"
    tube2.m_tube_id = "MSU0000001"
    tube1.tension.add_record(tension.TensionRecord(350))
    tube2.tension.add_record(tension.TensionRecord(355))
    tube2.leak.add_record(leak.LeakRecord(0))
    tubes.add_tube(tube1)
    tubes.add_tube(tube2)
    dbman.update()

    # Re-open the DB and confirm both tension records survived.
    del tubes
    tubes = db.db()
    tube4 = tubes.get_tube("MSU0000001")
    assert len(tube4.tension.get_record('all')) == 2
    assert tube4.leak.get_record('last').leak_rate == 0

    # After a wipe the tube must be gone and the DB empty.
    del tubes
    tubes = db.db()
    dbman.wipe('confirm')
    with pytest.raises(KeyError):
        tube4 = tubes.get_tube("MSU0000001")
    assert tubes.size() == 0
def put(id):
    """Merge the parsed request arguments into guild *id*'s stored
    settings and return (settings, 201)."""
    parser = reqparse.RequestParser()
    # Bool
    parser.add_argument('Greet')         # | Welcome message
    parser.add_argument('bl')            # | Blacklist toggle
    parser.add_argument('logging')       # | Logging toggle
    parser.add_argument("automod")       # | Auto-moderation toggle
    # Integer
    parser.add_argument('GreetChannel')  # | Welcome channel
    parser.add_argument('LogChannel')    # | Logging channel
    # List
    parser.add_argument('Mods')          # | Moderator roles
    parser.add_argument('Admins')        # | Administrator roles
    args = parser.parse_args()

    # NOTE(review): settings are read via get_user_settings but written
    # back via set_server_settings -- looks inconsistent; confirm which
    # store is intended (the sibling GET uses get_server_settings).
    settings = db().get_user_settings(str(id))
    settings["Greet"] = args['Greet']
    settings['bl'] = args['bl']
    settings['logging'] = args['logging']
    settings['automod'] = args['automod']
    settings['GreetChannel'] = args['GreetChannel']
    settings['LogChannel'] = args['LogChannel']
    # extend() replaces the manual append loops; `set` no longer shadows
    # the builtin.
    settings['Admins'].extend(args["Admins"])
    settings['Mods'].extend(args["Mods"])
    db().set_server_settings(str(id), settings)
    return settings, 201
def save_users(self):
    """Flush every user and fight queued on the context, then clear both queues."""
    for queued_user in context.save_users:
        db().user.save(queued_user.src)
    for queued_fight in context.save_fights:
        db().fight.save(queued_fight.src)
    context.save_users = set()
    context.save_fights = set()
def __load_author_ids(self):
    """Bulk-upsert train flags for all authors into analysis*_authors.

    first_paper_date is inserted as a 1970 placeholder and backfilled
    elsewhere; train == -1 is normalized to 0.
    """
    authors = self.net.get_train_authors(generate_if_not_exists=False)
    # Idiom: str.join replaces the manual is_first/accumulator loop.
    values = ", ".join(
        "(%s, %s, '1970-01-01') " % (author_id, 0 if train == -1 else train)
        for author_id, train in authors)
    sql = """
        INSERT INTO analysis{0}_authors
        (author_id, train, first_paper_date)
        VALUES """.format(self.net.suffix_cuts)
    sql += values
    sql += "ON DUPLICATE KEY UPDATE train = VALUES(train)"
    db().cursor().execute(sql)
    db().commit()
def get_users():
    """GET: return all rows of `users`; otherwise insert the posted data."""
    if request.method == 'GET':
        return request_processor(True, db().get_data_from_table('users') or [])
    else:
        # BUG FIX: flask.Request has no .get() method, so the original
        # `request.get('data')` raised AttributeError on every POST.
        # Read the JSON body instead, matching get_requests() in this file.
        data = request.json
        return request_processor(
            True, db().put_data_into_table('users', data) or [])
def submit():
    """POST: persist a JSON log entry; GET: render the submission form."""
    if request.method != 'POST':
        return render_template('submit.html')
    payload = request.get_json()
    db().db_insert(payload)
    return "log accepted"
def __add_paper_authors_countries(self):
    """Copy per-(author, paper) country codes from a pickle file into
    the paper_authors table."""
    import re
    path = os.path.join(self.data_dir, 'keywords-backend',
                        'author_paper_countries')
    with open(path, 'rb') as fh:  # fix: file was opened without being closed
        countries = pickle.load(fh)
    c = db().cursor()
    pattern = re.compile(r"\d")  # raw string: avoid invalid-escape warning
    for author_id, author_countries in enumerate(countries):
        print(author_id)
        for paper_id, country in author_countries:
            # Restore '/'. e.g. hep-th34523 -> hep-th/34523
            pos = pattern.search(paper_id).start()
            if pos > 0:
                paper_id = paper_id[:pos] + '/' + paper_id[pos:]
            # Update
            c.execute(
                """UPDATE paper_authors AS pa
                   INNER JOIN papers AS p ON p.id = pa.paper_id
                   SET pa.country = %(country)s
                   WHERE p.arxiv_id = %(arxiv_id)s
                   AND pa.author_id = %(author_id)s""",
                {'country': country, 'author_id': author_id,
                 'arxiv_id': paper_id})
    print("Committing...")
    db().commit()
def share():
    """ Show the list of desk to with the item can be push """
    item = application.getItemByUUID(request.args(0))
    if item is None:
        raise HTTP(404)

    # Desks the user may push to, excluding the one they are currently on.
    query = (db.desk.id != session.desk_id)
    query &= auth.accessible_query('push_items', db.desk)
    posible_desk = db(query).select()

    fld_to_desk = Field('to_desk', 'integer')
    fld_to_desk.label = T("Push to organization desk")
    fld_to_desk.comment = T("Select where to push the item")
    fld_to_desk.requires = IS_EMPTY_OR(
        IS_IN_SET([(desk.id, desk.name) for desk in posible_desk]))

    fld_personal_desk = Field('to_person_desk', 'integer')
    fld_personal_desk.label = T("Push to other person desk")
    fld_personal_desk.comment = T("Select a person from the list.")
    # Every other member of the organizations the current user belongs to.
    persons = []
    orgs = db(db.organization.users.contains(auth.user.id)).select()
    for org in orgs:
        x = [db.auth_user(id=y) for y in org.users if y != auth.user.id]
        persons.extend(x)
    persons = list(set(persons))
    fld_personal_desk.requires = IS_EMPTY_OR(
        IS_IN_SET([(per.id, "{} {}".format(per.first_name, per.last_name))
                   for per in persons]))

    fld_cond = Field('cond', 'boolean', default=False)
    fld_cond.label = T('To other person?')

    form = SQLFORM.factory(fld_to_desk, fld_personal_desk, fld_cond,
                           submit_button=T("Send"), table_name='share')
    if form.process().accepted:
        src = session.desk_id
        if form.vars.cond:
            # send the item to other user
            other_user = db.auth_user(form.vars.to_person_desk)
            target = application.getUserDesk(other_user).id
        else:
            # send the item to the selected desk
            target = form.vars.to_desk
        if target:
            ct = application.getContentType(item.item_type)
            ct.shareItem(item.unique_id, src, target)
        response.js = "$('#metaModal').modal('hide');"
        response.flash = None
    return locals()
def save(self, force=False):
    """Persist the fight to the DB: immediately when forced or newly
    created, otherwise queue it on the context for a batched save."""
    self.src['data'] = self.js_save()
    if force or self.is_new:
        self.is_new = False
        db().fight.save(self.src)
    else:
        context.save_fights.add(self)
def __add_num_authors(self):
    """Recompute papers.num_authors from the paper_authors join table."""
    cursor = db().cursor()
    cursor.execute("""UPDATE papers AS p
        SET p.num_authors = (
            SELECT COUNT(*) FROM paper_authors AS pa
            WHERE pa.paper_id = p.id
        )""")
    db().commit()
def search():
    """POST: run the posted query against the log DB and return JSON;
    GET: render the search page with all logs."""
    if request.method == 'POST':
        query = request.get_data().decode('utf-8')
        return jsonify(list(db().db_query(query)))
    return render_template('search.html', logs=list(db().db_query()))
def delete():
    """Delete the dashboard named in the URL args, drop it from the
    session, and redirect back to the index."""
    dashboard_row = db.dashboard(request.args(0))
    db(db.dashboard.id == dashboard_row.id).delete()
    session.dashboard = None
    redirect(URL('default', 'index'))
    return CAT()
def generate_hindex_data(self):
    """Populate analysis*_hindex_data with before/after/cumulative
    h-index values for every author, unless already generated."""
    print("Generating hindex data...")
    c2 = db().cursor()
    # Skip when this predict_after_years slice already exists.
    c2.execute(
        """
        SELECT COUNT(*) FROM analysis{0}_hindex_data
        WHERE predict_after_years = %(predict_after_years)s
        """.format(self.suffix_cuts),
        {'predict_after_years': self.predict_after_years})
    if c2.fetchone()[0] > 0:
        print("-> Already exists, skipping")
        return
    if self.cutoff == self.CUTOFF_SINGLE:
        # Optimized version for single cutoff
        return self.__generate_hindex_data_single()
    c = db().cursor()
    c.execute("""SELECT author_id, first_paper_date
        FROM analysis{0}_authors""".format(self.suffix_cuts))
    for author_id, first_paper_date in c:
        split_date = self.get_split_date(first_paper_date)
        end_date = self.get_end_date(first_paper_date)
        hindex_before = self.__get_hindex_of_author_fast(
            author_id, start_date=first_paper_date, end_date=split_date)
        hindex_after = self.__get_hindex_of_author_fast(
            author_id, start_date=split_date, end_date=end_date)
        hindex_cumulative = self.__get_hindex_of_author_fast(
            author_id, start_date=first_paper_date, end_date=end_date)
        c2.execute(
            """
            INSERT INTO analysis{0}_hindex_data SET
            author_id=%(author_id)s,
            predict_after_years=%(predict_after_years)s,
            hindex_before=%(hindex_before)s,
            hindex_after=%(hindex_after)s,
            hindex_cumulative=%(hindex_cumulative)s
            """.format(self.suffix_cuts),
            {'author_id': author_id,
             'predict_after_years': self.predict_after_years,
             'hindex_before': hindex_before,
             'hindex_after': hindex_after,
             'hindex_cumulative': hindex_cumulative})
        print(author_id, hindex_before, hindex_after, hindex_cumulative,
              first_paper_date.strftime("%Y-%m-%d"))
    print("Committing")
    db().commit()
def get(self):
    """Render the custom-settings page with nav links, friend links and
    footer blocks pulled from the DB."""
    try:
        DUOSHUO = db.db("SELECT DUOSHUO FROM Ver")[0][0]
    except Exception:  # narrowed from a bare except: don't trap SystemExit
        DUOSHUO = ''
    NAV = db.db("SELECT ID,NAME,LINK FROM LINK WHERE TYPE='nav'")
    LINK = db.db("SELECT ID,NAME,LINK FROM LINK WHERE TYPE='link'")
    LAST = db.db("SELECT ID,NAME,Remark,HTML FROM Other WHERE LOCATION='last'")
    self.render("custom.html", conf=configure, DUOSHUO=DUOSHUO, NAV=NAV,
                LINK=LINK, LAST=LAST)
def find_transients(name=None, lean=False):
    """Fetch transients (all of them, or by name). Unless *lean*, each
    row also gets its intensity events attached under 'events'."""
    if name is None:
        query = (db.transients.id > 0)
    else:
        query = (db.transients.name == name)
    rows = db(query).select(db.transients.ALL).as_list()
    if not lean:
        for row in rows:
            row['events'] = db(
                db.intensities.trans_id == row['id']
            ).select(db.intensities.ALL).as_list()
    return rows
def setPics(piclist, type=1):
    """Insert each picture record into `meizitu`, stamping creation time
    and the pid; failures are printed and skipped."""
    for record in piclist:
        try:
            record['created_at'] = time.strftime("%Y-%m-%d %H:%M:%S",
                                                 time.localtime())
            record['pid'] = type
            db.db().insert('meizitu', record)
        except Exception as e:
            print(e)
def add_device(device_token):
    """Upsert a device row keyed by token and return it as a dict.

    New tokens are inserted; known tokens get last_seen refreshed.
    """
    existing = db(db.devices.id == device_token).select().first()
    if existing is None:
        db.devices.insert(id=device_token)
    else:
        db(db.devices.id == device_token).update(
            last_seen=datetime.datetime.now())
    row = db(db.devices.id == device_token).select().first()
    return row.as_dict()
def setContents(duanzilist, type=1):
    """Insert each record into `duanzi`, stamping creation time and pid;
    failures are printed and skipped.

    Fix: removed the no-op self-assignment `data['aid'] = data['aid']`.
    """
    for data in duanzilist:
        try:
            data['created_at'] = time.strftime("%Y-%m-%d %H:%M:%S",
                                               time.localtime())
            data['pid'] = type
            db.db().insert('duanzi', data)
        except Exception as e:
            print(e)
def setComments(comments, type=3):
    """Insert every comment from the nested lists into `comments`,
    stamping creation time and pid; failures are printed and skipped."""
    for comment_group in comments:
        for comment in comment_group:
            try:
                comment['created_at'] = time.strftime(
                    "%Y-%m-%d %H:%M:%S", time.localtime())
                comment['pid'] = type
                db.db().insert('comments', comment)
            except Exception as e:
                print(e)
def __init__(self):
    """Load all site data needed by the templates into one context dict."""
    # Latest 10 articles
    self.NEW = db.db("SELECT TITLE,SHORT,ID,tags,CREATETIME FROM MY WHERE LEIBIE<>'' ORDER BY CREATETIME DESC LIMIT 10")
    # Category list
    self.LEIBIE = db.db("SELECT LEIBIE FROM MY WHERE LEIBIE<>'' GROUP BY LEIBIE")
    # Date-archive buckets
    self.HASH = db.db("SELECT HASH FROM MY WHERE LEIBIE<>'' GROUP BY HASH")
    # Friend links
    self.LINK = db.db("SELECT ID,NAME,LINK FROM LINK WHERE TYPE='link'")
    # Navigation links
    self.nav = db.db("SELECT ID,NAME,LINK FROM LINK WHERE TYPE='nav'")
    # Extra article footer content
    self.Other = db.db("SELECT HTML FROM Other WHERE TYPE='belong' and LOCATION='last'")
    # Site configuration
    self.conf = db.db("SELECT SITENAME,subtitle,description,keywords FROM CONFIG")
    self.DUOSHUO = db.db("SELECT DUOSHUO FROM Ver")[0][0]
    self.JSCODE = db.db("SELECT HTML FROM Other WHERE NAME='JSCODE'")[0][0]
    self.NAME = self.conf[0][0]
    self.Subtitle = self.conf[0][1]
    self.description = self.conf[0][2]
    self.keywords = self.conf[0][3]
    # Template context assembled from the queries above.
    self.dict = {
        'HASH': [i[0] for i in self.HASH],
        'LEIBIE': [i[0] for i in self.LEIBIE],
        'NAME': self.NAME,
        'Subtitle': self.Subtitle,
        'description': self.description,
        'tags': self.keywords,
        'link': [(i[1], i[2]) for i in self.LINK],
        'nav': [(i[1], i[2]) for i in self.nav],
        'LIST': [(i[0], i[2]) for i in self.NEW],
        'TIME': '',
        'ID': '0',
        'title': self.NAME,
    }
def __getitem__(self, keys):
    """
    Allows access to table rows. Key should be a tuple that specifies the
    id of the record in index 0, and the columns in the following indexes.
    If no columns are specified the entire row is returned.
    """
    # NOTE(review): the id is interpolated straight into the WHERE clause;
    # if it can come from user input this is a SQL-injection risk -- confirm.
    if not isinstance(keys, tuple):
        # A bare 0 is the conventional request for the column list.
        if keys == 0:
            return self.columns()
        return db().select(self.name, where="id=%s" % keys)
    return db().select(self.name, cols=list(keys[1:]),
                       where="id=%s" % keys[0])
def todayId(self):
    """
    Returns id in date table of date matching todays date. If no record
    is found a new record is created.
    """
    wherestr = 'year=%s and month=%s and day=%s' % (self.year, self.month,
                                                    self.day)
    today = db().select('date', cols='id', where=wherestr)
    if today == "":
        # No row for today yet: create one, then re-read its id.
        # NOTE(review): the insert goes through db.insert while the reads
        # use db().select -- confirm that mixed usage is intentional.
        db.insert('date', {'day': self.day, 'month': self.month,
                           'year': self.year})
        today = db().select('date', cols='id', where=wherestr)
    return today
def save(self, force=False):
    """Persist the user's data to the DB: immediately when forced,
    otherwise queue on the context for a batched save."""
    self.src["data"] = self.js_save()
    # Mirror the frequently-read fields onto the stored document.
    for field in ("loc", "exp", "level"):
        self.src[field] = self[field]
    if force:
        db().user.save(self.src)
    else:
        context.save_users.add(self)
def reward_factor(self, other_id):
    """Return the duel reward multiplier: each repeat duel between the
    same pair within 24h decays the reward by a factor of 0.75."""
    id1, id2 = sorted([self.id, other_id])
    duel = db().duels.find_one({"id1": id1, "id2": id2})
    if duel is None:  # idiom fix: was `== None`
        # BUG FIX: the new record used the unsorted (self.id, other_id)
        # pair while lookups use the sorted pair, so a duel started by
        # the higher id could never be found again.
        duel = {"id1": id1, "id2": id2, "count": 0, "date": now()}
    elif duel["date"] <= now() - 24 * 3600:
        # Older than a day: restart the decay counter.
        duel.update({"count": 0, "date": now()})
    duel["count"] += 1
    db().duels.save(duel)
    return 0.75 ** (duel["count"] - 1)
def setReplay(replayList, type=1):
    """Insert every reply item from the nested lists into `replay`,
    stamping creation time and pid; failures are printed and skipped."""
    for replies in replayList:
        try:
            if len(replies) > 0:
                for item in replies:
                    item['created_at'] = time.strftime(
                        "%Y-%m-%d %H:%M:%S", time.localtime())
                    item['pid'] = type
                    db.db().insert('replay', item)
        except Exception as e:
            print(e)
def __plugin_photoset_item_on_delete(s):
    """On item delete: drop the photoset's photos and its content row,
    then return False so the framework proceeds with the deletion."""
    item = s.select().first()
    if item.item_type == 'photoset':
        # cleanup here
        content = db.plugin_photoset_content(item_id=item.unique_id)
        db(db.plugin_photoset_photo.id.belongs(content.photoset)).delete()
        db(db.plugin_photoset_content.item_id == item.unique_id).delete()
    return False  # remember to proceed
def __plugin_picture_item_on_delete(s):
    """On item delete: drop the picture's renditions and its info row,
    then return False so the framework proceeds with the deletion."""
    item = s.select().first()
    if item.item_type == 'picture':
        # cleanup here
        info = db.plugin_picture_info(item_id=item.unique_id)
        db(db.plugin_picture_rendition.id.belongs(info.renditions)).delete()
        db(db.plugin_picture_info.item_id == item.unique_id).delete()
    return False  # remember to proceed
def set_var(key, value):
    """Store a world variable both in memory and in the DB.

    None is coerced to False before storing.
    """
    global world_mem
    if value is None:
        value = False
    world_mem[key] = value
    db().world.save({"_id": key, "value": value})
def post(self):
    """Change the stored password (and optionally the username) after
    verifying the old password against the cookie user."""
    user = self.get_secure_cookie("WLBLOG")
    username = self.get_argument('newuser', '')
    oldpwd = self.get_argument('oldpwd', '')
    pwd1 = self.get_argument('pwd1', '')
    if db.check(user, oldpwd):
        if not username:
            username = user
        # BUG FIX: the format string contained literal '******' where the
        # %s placeholders belong (apparently scrubbed), so the % operator
        # raised TypeError. NOTE(review): values are still interpolated
        # into SQL -- parameterize db.db() if user input can reach here.
        db.db("UPDATE Ver SET PASSWORD='%s',USERNAME='%s' WHERE USERNAME='%s'"
              % (pwd1, username, user))
        self.write("0")
    else:
        self.write("密码修改失败,请确认你的输入!")
def time_ips():
    """Loop forever: top up the spare proxy pool whenever it drops below
    the configured minimum, then sleep for the check interval."""
    while True:
        if db().check_db('ipList2') < config.ipList2_min:
            print("备用池存储数据少于临界值,准备填数据")
            ips_list_k = get_ips().Crawling_ips(config.kuaidaili)
            ips_list_x = get_ips().Crawling_ips(config.xicidaili)
            db().insert_db(ips_list_x + ips_list_k)
        else:
            print("备用池数据充足")
        print(config.ipList2_check_interval_time, '秒后再检查数据库')
        time.sleep(config.ipList2_check_interval_time)
def update_setting(setting, value):
    """Insert or update one row of the settings table, committing on
    success and rolling back (then re-raising) on failure."""
    with db().cursor() as cursor:
        try:
            # FIX: pass execute() parameters as a tuple; a bare string is
            # only accepted by some DB-API drivers.
            cursor.execute("SELECT * FROM settings WHERE setting = %s",
                           (setting,))
            if cursor.rowcount == 0:
                cursor.execute(
                    "INSERT INTO settings (setting, value) VALUES (%s, %s)",
                    (setting, value))
            else:
                cursor.execute(
                    "UPDATE settings SET value=%s WHERE setting=%s ",
                    (value, setting))
            db().commit()
        except Exception:  # narrowed from bare except; still rolls back and re-raises
            db().rollback()
            raise
def run(self):
    """Import journal impact factors from yearly JCR CSV exports into
    the `jif` table, skipping header/boilerplate rows."""
    c = db().cursor()
    for year in range(self.year_first, self.year_last + 1):
        file = os.path.join(self.data_dir,
                            'JournalHomeGrid-' + str(year) + '.csv')
        with open(file, 'r') as fh:  # fix: files were never closed
            for row in csv.reader(fh):
                # Title row
                if row[0].startswith('Journal Data Filtered By: ' +
                                     'Selected JCR Year: ' + str(year)):
                    continue
                # Header row
                if row[0] == 'Rank':
                    continue
                # Copyright row
                if row[0].startswith('Copyright') or \
                        row[0].startswith('By exporting'):
                    continue
                # Actual content
                journal = row[2]
                issn = row[3]
                jif = row[5]
                if jif == 'Not Available':
                    continue
                # Normalize journal
                journal = journal.lower() \
                    .replace(' ', '').replace(':', '').replace('-', '')
                print(journal, year, jif)
                c.execute(
                    """
                    INSERT INTO jif SET
                    journal = %(journal)s,
                    issn = %(issn)s,
                    year = %(year)s,
                    jif = %(jif)s""",
                    {'journal': journal, 'issn': issn, 'year': year,
                     'jif': jif})
    print("Committing...")
    db().commit()
def __plugin_photoset_item_on_delete(s):
    """Delete-callback: when the item is a photoset, remove its photo
    rows and content record before the framework deletes the item."""
    item = s.select().first()
    if item.item_type == 'photoset':
        # cleanup here
        cnt = db.plugin_photoset_content(item_id=item.unique_id)
        db(db.plugin_photoset_photo.id.belongs(cnt.photoset)).delete()
        db(db.plugin_photoset_content.item_id == item.unique_id).delete()
    return False  # remember to proceed
def conf(): #全局设定信息 global NAME,Subtitle,description,keywords,Category,UUID conf = db.db("SELECT SITENAME,subtitle,description,keywords,uuid FROM CONFIG")[0] NAME = conf[0] Subtitle = conf[1] description = conf[2] keywords = conf[3] UUID= conf[4] if not UUID: UUID=base64.b64encode(uuid.uuid4().bytes + uuid.uuid4().bytes) print db.exe("UPDATE config SET uuid='%s' WHERE ID=1" % UUID) Category = [(i[0],i[1]) for i in db.db("SELECT ID,Category FROM Category")] Category.append((' ',' '))
def post(self):
    """Save the site configuration posted from the admin form, rebuild
    the static pages, and report '0' on success or 'ERROR' on failure."""
    NAME = self.get_argument('bkname', '')
    Subtitle = self.get_argument('subtitle', '')
    description = self.get_argument('description', '')
    keywords = self.get_argument('keywords', '')
    # NOTE(review): values are %-interpolated straight into SQL --
    # injection risk if db.db() cannot take parameters; confirm.
    try:
        db.db("update CONFIG SET SITENAME='%s',subtitle='%s',description='%s',keywords='%s' WHERE ID=1 "
              % (NAME, Subtitle, description, keywords))
    except:
        self.write("ERROR")
    else:
        tohtml.html().ALL()
        self.write("0")
def __exit__(self, exc_type, exc_val, exc_tb):
    """Restore the original suffixes and reset train flags on exit."""
    # Reset suffix
    self.net.suffix = self.orig_suffix
    self.net.suffix_author_ids = self.orig_suffix_author_ids
    # Reset train values; the connection may already be gone during
    # interpreter/test teardown, so DB errors are swallowed.
    try:
        self.c.execute("""
            UPDATE analysis{0}_authors
            SET train = train_real""".format(self.net.suffix_cuts))
        db().commit()
    except (OperationalError, InterfaceError):
        pass
def __plugin_picture_item_on_delete(s):
    """Delete-callback: when the item is a picture, remove its rendition
    rows and info record before the framework deletes the item."""
    item = s.select().first()
    if item.item_type == 'picture':
        # cleanup here
        cnt = db.plugin_picture_info(item_id=item.unique_id)
        db(db.plugin_picture_rendition.id.belongs(cnt.renditions)).delete()
        db(db.plugin_picture_info.item_id == item.unique_id).delete()
    return False  # remember to proceed
def delete():
    """ Delete an Item. """
    item = application.getItemByUUID(request.args(0))
    # remove the item from the index
    # TODO: move this to Application if needed
    Whoosh().remove(item.unique_id)
    db(db.item.id == item.id).delete()
    # Fix: dropped the unused local `item_id` the original assigned.
    return ''
def get_requests():
    """GET: return stored requests with their JSON columns decoded.
    Otherwise: store the posted request, schedule its job, return it."""
    if request.method == 'GET':
        rows = db().get_data_from_table('requests')
        if rows:
            for row in rows:
                # Stored as JSON strings; decode for the caller.
                row['notifications'] = json.loads(row['notifications'])
                row['request'] = json.loads(row['request'])
        return request_processor(True, rows or [])
    data = request.json
    data['notifications'] = json.dumps(data['notifications'])
    data['request'] = json.dumps(data['request'])
    stored = db().put_data_into_table('requests', data) or []
    add_job(data)
    return request_processor(True, stored)
def drop_offline_users():
    """Log out users whose last action is older than the limit; if a
    clean logout fails, force the online flag off. Re-arms its timer."""
    try:
        time_limit = now() - LAST_ACTION_TIME_LIMIT
        for u in db().user.find({"online": True,
                                 "last_action": {"$lt": time_limit}}):
            try:
                user = User(u)
                user.do_logout()
            except Exception:  # narrowed from bare except
                log_exception()
                log_warning('Error trying to log out user %s. Forcing logout...'
                            % u.get('name'))
                db().user.update({"_id": u["_id"]},
                                 {"$set": {"online": False}})
        add_timeout("drop_offline_users", LAST_ACTION_TIME_LIMIT)
    except Exception:  # narrowed from bare except
        log_exception()
        log_error('Error in task drop_offline_users.')
def get(self):
    """Render the custom page with site config, nav/friend links and
    footer blocks; conf() refreshes the module-level globals first."""
    conf()
    try:
        DUOSHUO = db.db("SELECT DUOSHUO FROM Ver")[0][0]
    except Exception:  # narrowed from a bare except
        DUOSHUO = ''
    NAV = db.db("SELECT ID,NAME,LINK FROM LINK WHERE TYPE='nav'")
    LINK = db.db("SELECT ID,NAME,LINK FROM LINK WHERE TYPE='link'")
    LAST = db.db("SELECT ID,NAME,Remark,HTML FROM Other WHERE LOCATION='last'")
    self.render("custom.html", title=NAME, NAME=NAME, Category=Category,
                Subtitle=Subtitle, description=description,
                keywords=keywords, DUOSHUO=DUOSHUO, NAV=NAV,
                LINK=LINK, LAST=LAST)
def search_transactions(user, query, qtype=None): """Search transactions using an :param object user: The user to get transactions for :param string query: What to search for """ query = query.strip() _db = db.db() transactions = [] if qtype is None: if re_date.match(query): print "searching date" _db.cur().execute("SELECT * FROM transactions WHERE user_id = ? AND transaction_date = ?;", (user.id, "{} 00:00:00.000".format(query))) elif re_amount.match(query): print "searching amount" _db.cur().execute("SELECT * FROM transactions WHERE user_id = ? AND (credit_amount = ? OR debit_amount = ?);", (user.id, query, query)) elif re_amount_range.match(query): print "searching amount range" left, right = query.split('-') _db.cur().execute("SELECT * FROM transactions WHERE user_id = ? AND ((credit_amount >= ? AND credit_amount <= ? ) OR (debit_amount >= ? AND debit_amount <= ?));", (user.id, left.strip(), right.strip(), left.strip(), right.strip())) else: print "searching default" _db.cur().execute("SELECT * FROM transactions WHERE user_id = ? AND transaction_description LIKE ?;", (user.id, "%{}%".format(query))) for row in _db.cur(): transactions.append(transaction(row)) return transactions
def create_database():
    """
    Initialize a database and create the table if not present and return True
    """
    global conn
    conn = db('./data/db/matches.db')
    conn.create_table(create_match_sql())
    # Fix: the docstring promised True but the function returned None.
    return True
def share():
    """ Show the list of desk to with the item can be push """
    item = application.getItemByUUID(request.args(0))
    if item is None:
        raise HTTP(404)

    # Desks the user can push to, excluding the one they are on.
    query = (db.desk.id != session.desk_id)
    query &= auth.accessible_query('push_items', db.desk)
    posible_desk = db(query).select()

    fld_to_desk = Field('to_desk', 'integer')
    fld_to_desk.label = T("Push to")
    fld_to_desk.comment = T("Select where to push the item")
    fld_to_desk.requires = IS_IN_SET(
        [(desk.id, desk.name) for desk in posible_desk])

    form = SQLFORM.factory(fld_to_desk, submit_button=T("Send"),
                           table_name='share')
    if form.process().accepted:
        # send the item to the selected desk
        ct = application.getContentType(item.item_type)
        ct.shareItem(item.unique_id, session.desk_id, form.vars.to_desk)
        response.js = "$('#metaModal').modal('hide');"
    return locals()
def get_nonzero_author_ids(self):
    """Return the cached author-id array for this suffix, computing and
    saving it from the DB on a cache miss."""
    filename = os.path.join(self.net.data_dir,
                            'author_ids-%s.npy' % self.orig_suffix)
    try:
        author_ids = np.load(filename)
    except FileNotFoundError:
        from db import db
        # NOTE(review): despite the function name, this selects authors
        # whose cumulative h-index IS zero after one year -- confirm intent.
        sql = """
            SELECT a.author_id
            FROM analysis{0}_authors AS a
            INNER JOIN analysis{0}_hindex_data AS h
            ON h.author_id = a.author_id
            WHERE h.predict_after_years = 1
            AND h.hindex_cumulative = 0
        """.format(self.net.suffix_cuts)
        c = db().cursor()
        numauthors = c.execute(sql)
        author_ids = np.fromiter(
            c, count=numauthors,
            dtype=[('author_id', 'i4')])['author_id']
        np.save(filename, author_ids)
    return author_ids
def main():
    """Backfill the tags and hosts lookup tables with any distinct
    values seen in the logs table but not yet registered."""
    dbObj = db(Config.MySQLconnector["host"], Config.MySQLconnector["user"],
               Config.MySQLconnector["password"],
               Config.MySQLconnector["database"])
    dbConn = dbObj.connector()
    logTableTags = dbObj.runQuery(dbConn, "SELECT tag FROM logs GROUP BY tag;")
    tagTableTags = dbObj.runQuery(dbConn, "SELECT tag FROM tags;")
    logTableHosts = dbObj.runQuery(dbConn,
                                   "SELECT host FROM logs GROUP BY host;")
    hostTableHosts = dbObj.runQuery(dbConn, "SELECT host FROM hosts;")
    knownTags = [row["tag"] for row in tagTableTags]
    knownHosts = [row["host"] for row in hostTableHosts]
    # NOTE(review): tag/host values are %-interpolated into the INSERTs;
    # injection risk if log content is untrusted -- parameterize if possible.
    for row in logTableTags:
        if row["tag"] not in knownTags:
            dbObj.runQuery(
                dbConn, "INSERT INTO tags(`tag`) VALUE('%s');" % row["tag"])
    for row in logTableHosts:
        if row["host"] not in knownHosts:
            dbObj.runQuery(
                dbConn, "INSERT INTO hosts(`host`) VALUE('%s');" % row["host"])
    return
def set_type(self, transaction_id, category_id):
    """set the transaction type on a transaction in the database"""
    # AJAX function
    database = db.db()
    database.cur().execute("UPDATE transactions SET ttype = ? WHERE id = ?",
                           (category_id, transaction_id))
    database.conn().commit()
    return ''
def create():
    """Sign a new team/user up from the POSTed form, populate the
    session, and redirect home; already-authed users go straight home."""
    if 'auth' in session:
        return redirect(url_for('home'))
    tid = request.form["tid"]
    uid = request.form["uid"]
    tname = request.form["tname"]
    turl = request.form["turl"]
    uname = request.form["uname"]
    chlist = request.form.getlist("chlist")
    auth = request.form["auth"]
    # Idiom: str.join replaces the manual first-element/accumulator loop.
    lists = '|m|'.join(chlist)
    try:
        db_obj = db.db()
        db_obj.signup(tid, uid, tname, turl, uname, lists)
        session["auth"] = auth
        session["tid"] = tid
        session["uid"] = uid
        session["turl"] = turl
        session["uname"] = uname.capitalize()
        session["tname"] = tname.capitalize()
        session["channels"] = lists
        return redirect(url_for('home'))
    except Exception:
        return "Error"
def home():
    """Movie listing page ordered by weighted rating, paginated."""
    # get paging parameter from GET string
    try:
        page = int(request.args.get('page', 1))
    except ValueError:
        page = 1
    # clamp to the first page
    if page < 1:
        page = 1
    cursor = db().cursor()
    # Total number of rated movies, used for the page count.
    cursor.execute(
        "SELECT DISTINCT movie_imdb_rating.movie_id, COUNT(*) max_count "
        "FROM poster INNER JOIN movie_imdb_rating "
        "ON movie_imdb_rating.movie_id=poster.movie_id "
        "ORDER BY rating_wa DESC ")
    max_count = cursor.fetchone()['max_count']
    cursor.execute(
        "SELECT DISTINCT movie_imdb_rating.movie_id, poster.poster, "
        "poster.title, movie_imdb_rating.rating_wa "
        "FROM poster INNER JOIN movie_imdb_rating "
        "ON movie_imdb_rating.movie_id=poster.movie_id "
        "ORDER BY rating_wa DESC "
        "LIMIT {}, {}".format((page - 1) * PAGE_SIZE, PAGE_SIZE))
    page_count = math.ceil(max_count / PAGE_SIZE)
    return render_template('movie.html', movies=cursor,
                           page_count=page_count, page=page,
                           page_size=PAGE_SIZE)
def __init__(self, q): self.version_name = u"freQ" self.version_version = u"1.0.0 (20080118)" self.version_os = u"Twisted %s, Python %s" % (twisted.__version__, sys.version) self.authd = 0 self.wrapper = wrapper() self.wrapper.onauthd = self.onauthd self.wrapper.register_handler(self.iq_handler, "iq", "get") self.wrapper.c.addBootstrap("//event/client/basicauth/authfailed", self.failed) self.wrapper.c.addBootstrap("//event/client/basicauth/invaliduser", self.failed) print "ok" self.g = {} self.alias_engine = None self.plug = pluginloader(self, q) self.muc = muc(self) self.log = log.logger() self.log.log("freQ %s started with pid=%s" % (self.version_version, os.getpid())) self.cmdhandlers = [] if config.ENABLE_SQLITE: self.db = db.db() self.wrapper.register_msg_handler(self.call_cmd_handlers, u".*") try: tl = config.LOGF except: tl = "./twisted.log" if not os.access(tl, 0): fp = file(tl, "w") fp.write("# freQ\n") fp.close() twisted.python.log.startLogging(open(tl, "a")) twisted.python.log.addObserver(self.error_handler)
def test_db_add_tube():
    '''
    This test is a simple test of adding tubes to the DB, not important
    since the DB code and this test will need to get rewritten.
    '''
    import tube, db
    from data import swage, tension, leak, dark_current

    tubes = db.db()
    dbman = db.db_manager()
    dbman.wipe('confirm')

    tube1 = tube.Tube()
    tube2 = tube.Tube()
    tube1.m_tube_id = "MSU0000001"
    tube2.m_tube_id = "MSU0000001"
    tube1.tension.add_record(tension.TensionRecord(350))
    tube2.tension.add_record(tension.TensionRecord(355))
    tube2.leak.add_record(leak.LeakRecord(0))

    # First tube alone: one tension record, no leak record yet.
    tubes.add_tube(tube1)
    dbman.update()
    tube3 = tubes.get_tube("MSU0000001")
    assert len(tube3.tension.get_record('all')) == 1
    with pytest.raises(IndexError):
        tube3.leak.get_record('last').leak_rate == 0

    # Second tube with the same id merges into the existing record.
    tubes.add_tube(tube2)
    dbman.update()
    tube4 = tubes.get_tube("MSU0000001")
    assert len(tube4.tension.get_record('all')) == 2
    assert tube4.leak.get_record('last').leak_rate == 0
def __init__(self, url_id):
    """Load list-page spidering configuration for one sp_list_urls row."""
    self.database = db()

    row = self.database.findone(
        'SELECT * FROM sp_list_urls WHERE ID=%d LIMIT 1' % url_id)
    self.website_id = row['website_id']
    self.url = row['url']
    self.rule_id = row['rule_id']

    row = self.database.findone(
        'SELECT * FROM sp_websites WHERE ID=%d LIMIT 1' % self.website_id)
    self.currency = row['currency']

    row = self.database.findone(
        'SELECT * FROM sp_list_pattern WHERE ID=%d LIMIT 1' % self.rule_id)
    self.pattern = row['pattern']                    # generic scrape-scope rule
    self.name_pattern = row['name_pattern']          # product-name rule
    self.price_pattern = row['price_pattern']        # price rule
    self.img_pattern = row['img_pattern']            # image rule
    self.img_pattern_attr = row['img_pattern_attr']  # image attribute-extraction rule
    self.page = row['page']                          # page-number variable name
    self.page_step = int(row['page_step'])           # page-number increment
    self.detail_url_pattern = row['url_pattern']     # detail-page URL match rule
    self.detail_url_pattern_attr = row['url_pattern_attr']  # detail-page extraction rule

    conf = config()
    self.__phantomjs = conf.phantomjs_path
    self.__list_spide_pages = conf.list_spide_pages
    self.__timewait = conf.timewait
def create(team1, team2, start_time=None, _type=None):
    '''
    Create a new fight. team1/team2 list the participants (User or Mob);
    Users automatically enter the fight.
    '''
    src = {"data": {"status": "init"}}
    id = db().fight.insert(src)
    f = Fight(str(id))
    f.is_new = True
    f._type = _type
    f.p = [[], []]
    for x in team1:
        f.add_fighter(x, 0)
    for x in team2:
        f.add_fighter(x, 1)
    # Idiom fix: `is not None` instead of `!= None`.
    if start_time is not None:
        f.start = start_time
    else:
        f.start = now()
    f.turn = 0
    f.status = "active"
    f.winner = -1
    f.anim = []
    f.recount_stats()
    # A fight can be decided immediately (e.g. one side empty).
    if f.over():
        return
    f.begin_turn()
def side_menu():
    """Return the current user's dashboards plus the alinks request var."""
    owned = (db.dashboard.id > 0) & (db.dashboard.created_by == auth.user.id)
    dash_list = db(owned).select(db.dashboard.ALL)
    alinks = request.vars.alinks
    return dict(dash_list=dash_list, alinks=alinks)
def index():
    """Render the comment form and existing comments for an item; on an
    accepted comment, reload the component and notify collaborators."""
    item = application.getItemByUUID(request.args(0))
    if item is None:
        raise HTTP(404)
    short = request.vars.short if request.vars.short is not None else False

    tbl = db.plugin_comment_comment
    tbl.item_id.default = item.unique_id
    form = SQLFORM(tbl, submit_button=T('Comment'),
                   formstyle='bootstrap3_stacked')
    rows = db(
        (tbl.id > 0) & (tbl.item_id == item.unique_id)
    ).select(orderby=~tbl.created_on)

    if form.process().accepted:
        response.js = "jQuery('#%s').get(0).reload();" % request.cid
        # send notifications to the users, except the current one
        subject = T("Comments on %s", (item.headline,))
        # get the comment body
        comment = tbl(form.vars.id)
        message = response.render(
            'plugin_comment/someone_commented.txt',
            dict(item=item, comment=comment, user=auth.user))
        application.notifyCollaborators(item.unique_id, subject, message)

    return dict(form=form, comments=rows, short=short, item=item)
def get_locations(zone, district):
    """Return (zones, districts, sites) rows filtered by the given zone
    and district; "All" means no filter at that level."""
    # NOTE(review): the filter values are concatenated straight into SQL;
    # if zone/district can come from user input this is an injection risk.
    db = database.db()
    sites = []
    # Get all sites and districts
    if district != "All":
        sites = db.query("SELECT location_id,name from camp_locations where belongs_to='" + district + "'").getresult()
        if zone != "All":
            districts = db.query("SELECT location_id,name from camp_locations where belongs_to='" + zone + "'").getresult()
        else:
            districts = db.query("SELECT location_id,name from camp_locations where location_type='district' ").getresult()
    else:
        if zone != "All":
            districts = db.query("SELECT location_id,name from camp_locations where belongs_to='" + zone + "'").getresult()
            # No district chosen: gather the sites of every district in the zone.
            for d in districts:
                sites += db.query("SELECT location_id,name from camp_locations where belongs_to='" + d[0] + "'").getresult()
        else:
            districts = db.query("SELECT location_id,name from camp_locations where location_type='district' ").getresult()
            sites = db.query("SELECT location_id,name from camp_locations where location_type='site'").getresult()
    zones = db.query("SELECT location_id,name from camp_locations where location_type='zone'").getresult()
    return zones, districts, sites