def init_web_db():
    # Build a web.py database handle from the [database] section of
    # config.cfg.  MySQL connections are forced to utf8; SQLite connections
    # get Python shims registered for MySQL-only SQL functions.
    parser = ConfigParser.SafeConfigParser()
    parser.optionxform = str  # keep option names case-sensitive
    parser.read("config.cfg")
    section = 'database'
    if 'database' not in parser.sections():
        raise Exception("No database section in config.cfg")
    dbn = parser.get(section, 'dbn')
    if dbn == "mysql":
        db = parser.get(section, 'db')
        user = parser.get(section, 'user')
        pw = parser.get(section, 'pw')
        host = parser.get(section, 'host')
        db = web.database(dbn='mysql', db=db, user=user, pw=pw, host=host)
        # Force utf8 for the whole session so non-ASCII data round-trips.
        db.query('SET NAMES utf8;')
        db.query('SET CHARACTER SET utf8;')
        db.query('SET character_set_connection=utf8;')
    elif dbn == "sqlite":
        dbname = parser.get(section, 'db')
        db = web.database(dbn='sqlite', db=dbname)
        # We need to mimic some MySQL functions in order to be able to use
        # SQLite or use different SQL commands for each database server. I
        # prefer the 1st option, naturally...
        db._db_cursor().connection.create_function("concat", 2, sqlite_concat)
        db._db_cursor().connection.create_function("conv", 3, sqlite_conv)
        db._db_cursor().connection.create_function("instr", 2, sqlite_instr)
        db._db_cursor().connection.create_function("rand", 0, sqlite_rand)
    # NOTE(review): any other dbn value leaves `db` unbound and raises
    # NameError here -- confirm whether other backends should be rejected
    # explicitly.
    return db
def cargar(self):
    """Dump the clientes, rentas, peliculas, ubicacion and generos tables
    to JSON files under static/json/.

    BUG FIX: ``static/json/peliculas.json`` was opened but never closed;
    all file handles now use ``with`` so they are closed even on error.
    """
    db = web.database(dbn="mysql", user="******", pw="tapw2014", db="blockbuster")
    # queries
    cliente = db.select("clientes")
    rentas = db.query("SELECT rentas.id_renta,rentas.id_cliente,rentas.id_pelicula,fecha_renta,fecha_entrega,titulo,entregada FROM `rentas`,peliculas WHERE rentas.id_pelicula=peliculas.id_pelicula")
    peliculas = db.select("peliculas")
    ubicaciones = db.select("ubicacion")
    generos = db.select("generos", order="genero DESC")
    # write clientes.json
    with open('static/json/clientes.json', 'w') as file_cliente:
        json.dump(cliente.list(), file_cliente)
    # NOTE(review): this mid-function reconnect looks redundant but is
    # preserved as-is.
    db = web.database(dbn="mysql", user="******", pw="tapw2014", db="blockbuster")
    # write rentas.json
    with open('static/json/rentas.json', 'w') as file_rentas:
        json.dump(rentas.list(), file_rentas)
    # write peliculas.json (previously leaked its file handle)
    with open('static/json/peliculas.json', 'w') as file_peliculas:
        json.dump(peliculas.list(), file_peliculas)
    # write ubicacion.json
    with open('static/json/ubicacion.json', 'w') as file_ubicaciones:
        json.dump(ubicaciones.list(), file_ubicaciones)
    # write generos.json
    with open('static/json/generos.json', 'w') as file_generos:
        json.dump(generos.list(), file_generos)
def __init__(self):
    """Open one query-echoing MySQL connection per episode schema
    (db1..db4)."""
    schemas = (('db1', 'episode'),
               ('db2', 'episode_soc'),
               ('db3', 'cl_episode'),
               ('db4', 'cl_episode_soc'))
    for attr, schema in schemas:
        conn = web.database(dbn='mysql', db=schema, user='******',
                            pw='abc111--', host='127.0.0.1')
        conn.printing = True  # echo queries, as the original did
        setattr(self, attr, conn)
def setup_database(dbname, pooling=False):
    """Return a web.py handle to the test database for *dbname*.

    SQLite uses the file webpy.db; anything else connects to the 'webpy'
    schema with the test credentials.  Passing ``-v`` on the command line
    turns on query echoing.
    """
    if dbname == 'sqlite':
        handle = web.database(dbn=dbname, db='webpy.db', pooling=pooling)
    else:
        handle = web.database(dbn=dbname, db='webpy', user='******',
                              pw='tiger', pooling=pooling)
    verbose = '-v' in sys.argv
    if verbose:
        handle.printing = True
    return handle
def codecrawler():
    # Copy every row of proj.type_translate into daplatform.type_translate,
    # timing the transfer.  (Python 2: print statements; the printed
    # strings are Chinese runtime output and are left untouched.)
    proj = web.database(host='222.201.131.208', port=3306, dbn='mysql', db='proj', user='******', pw='scutensave')
    daplatform = web.database(dbn='mysql', db='daplatform', user='******', pw='123')
    starttime = datetime.datetime.now()
    print '开始收集数据'
    results = proj.select("type_translate", order='code DESC')
    for record in results:
        # One INSERT per source row; no deduplication is attempted.
        daplatform.insert("type_translate", code=record.code, type=record.type)
    endtime = datetime.datetime.now()
    print '耗时 :', (endtime-starttime).seconds, ' s'
def connect(test_db=False):
    # Open the module-global Postgres connection: local DB when
    # public.isTest() is true, the production host otherwise.
    # NOTE(review): the test_db parameter is never read -- selection is
    # driven entirely by public.isTest().
    global db
    if public.isTest():
        db = web.database(port=5432, host='127.0.0.1', dbn='postgres', db='parking', user='******', pw='parking')
    else:
        db = web.database(port=5432, host='10.1.1.100', dbn='postgres', db='parking', user='******', pw='parking)()#')
    # Newer drivers expose set_client_encoding(); otherwise fall back to SQL.
    try:
        db.set_client_encoding('UTF8')
    except (AttributeError):
        db.query("set client_encoding to 'UTF-8'")
def setup_database(dbname, driver=None, pooling=False):
    """Create a query-echoing web.py database handle for *dbname*.

    sqlite -> file webpy.db; postgres -> 'webpy' as the current OS user
    with an empty password; anything else -> 'webpy' with the test
    credentials.
    """
    shared = dict(dbn=dbname, pooling=pooling, driver=driver)
    if dbname == 'sqlite':
        db = web.database(db='webpy.db', **shared)
    elif dbname == 'postgres':
        db = web.database(db='webpy', user=os.getenv('USER'), pw='', **shared)
    else:
        db = web.database(db='webpy', user='******', pw='tiger', **shared)
    db.printing = True
    return db
def setup_database(dbname, driver=None, pooling=False):
    """Return a web.py database for the requested backend, with query
    echoing enabled."""
    if dbname == "sqlite":
        extra = {"db": "webpy.db"}
    elif dbname == "postgres":
        # Postgres trusts the current OS user with an empty password.
        extra = {"db": "webpy", "user": os.getenv("USER"), "pw": ""}
    else:
        extra = {"db": "webpy", "user": "******", "pw": "tiger"}
    db = web.database(dbn=dbname, pooling=pooling, driver=driver, **extra)
    db.printing = True
    return db
def crawler():
    # Copy rows of proj.text into daplatform.originText in batches of 100,
    # newest id first, timing the whole run.  (Python 2.)
    proj = web.database(host='222.201.131.208', port=3306, dbn='mysql', db='proj', user='******', pw='scutensave')
    daplatform = web.database(dbn='mysql', db='daplatform', user='******', pw='123')
    starttime = datetime.datetime.now()
    print 'start crawler...'
    total = 0
    # NOTE(review): the <= bound means the last batch starts at offset
    # 10000, so up to 10100 rows are copied in total.
    while (total <= 10000):
        results = proj.select("text", order='id DESC', limit=100, offset=total)
        for record in results:
            # Whole-row copy, column by column, keeping the source id.
            daplatform.insert('originText', id=record.id, title=record.title, type=record.type, date=record.date, accuser=record.accuser, defendant=record.defendant, publicProsecutionOrgan=record.publicProsecutionOrgan, authorizedAgent=record.authorizedAgent, issue=record.issue, crime=record.crime, law=record.law, judgement=record.judgement, content=record.content, place=record.place)
        total = total + 100
    endtime = datetime.datetime.now()
    print 'cost time ', (endtime-starttime).seconds, ' s'
def run(db_name, user, passwd, tables):
    # Re-encode existing MySQL data as utf8: `db` reads with charset=None
    # (server default) while `db_utf8` writes through a utf8 connection;
    # every row is rewritten after the table is converted.  (Python 2.)
    db = web.database(dbn='mysql', db=db_name, user=user, passwd=passwd, charset=None)
    db_utf8 = web.database(dbn='mysql', db=db_name, user=user, passwd=passwd)
    db.printing = db_utf8.printing = False
    for table in tables:
        print '#' * 20 + table + '#' * 20
        db.query('alter table %s convert to character set utf8' % table)
        for i, m in enumerate(db.select(table)):
            if i % 100 == 0:
                print i  # progress marker every 100 rows
            # NOTE(review): the k, v loop re-issues the same whole-row
            # update len(m) times (k and v are unused); also assumes every
            # table has an `id` column.
            for k, v in m.items():
                db_utf8.update(table, where='id=%s' % m.id, **m)
def connect(url):
    # Build a web.py connection from a database URL.
    # sqlite URLs take the db file from the path (or netloc when the path
    # is empty); mysql URLs are user:pw@host/dbname.
    # NOTE(review): any other scheme falls through and returns None.
    r = urlparse.urlparse(url)
    if r.scheme == 'sqlite':
        if (r.path != ''):
            db = r.path
        else:
            db = r.netloc
        return web.database(dbn='sqlite', db=db)
    if r.scheme == 'mysql':
        # r.path starts with '/', so strip it to get the schema name.
        return web.database(dbn='mysql', host = r.hostname, user = r.username, pw = r.password, db = r.path[1:])
def PUT(self, setting):
    # Admin-only: update one runtime setting from the request body.
    # Changing any db* setting immediately re-opens the database to
    # validate the new values.  "metrics" cannot be written here.
    if is_admin():
        try:
            if setting != "metrics":
                self[setting] = web.input()["value"]
                if setting in ("dbtype", "dbhost", "dbname", "dbuser", "dbpass", "dbport"):
                    try:
                        db = web.database(dbn=settings["dbtype"], host=settings["dbhost"], db=settings["dbname"], user=settings["dbuser"], password=settings["dbpass"], port=int(settings["dbport"]))
                    except ValueError:
                        # Non-numeric port: reconnect without an explicit port.
                        db = web.database(dbn=settings["dbtype"], host=settings["dbhost"], db=settings["dbname"], user=settings["dbuser"], password=settings["dbpass"])
                return json.dumps({"status": "OK", "message": "Setting Updated"})
        except KeyError:
            # Missing "value" in the request: fall through to Forbidden.
            pass
    return web.Forbidden()
def run(db_name, drop=False):
    # (Re)create the SQL cache table in the given MySQL database,
    # prompting interactively for credentials.  (Python 2: raw_input.)
    # NOTE(review): the argument list below has been mangled by a
    # credential-redaction pass ('User: '******'Password: ') and is not
    # valid Python as written -- the original presumably read
    # user=raw_input('User: ') and pw=getpass('Password: ').
    db = web.database(dbn='mysql', db=db_name, user=raw_input('User: '******'Password: ') )
    sql_cache.Cache.make_sql_table(db, drop)
def POST(self):
    ''' Handles changing password. '''
    chpform = self.chpform()
    if not loggedin():
        raise web.seeother('/login')
    elif not chpform.validates():
        return RENDER.changepass(chpform, None)
    else:
        # Both passwords are compared and stored as hashes only.
        oldpassword = gethash(chpform['oldpassword'].value)
        newpassword = gethash(chpform['newpassword'].value)
        if oldpassword == newpassword:
            # NOTE(review): the message says "same as the new one" where
            # "old one" is meant; left untouched (runtime string).
            return RENDER.changepass(chpform, 'The new password can not be the same as the new one'.upper())
        try:
            dbh = web.database(dbn=DBTYPE, db=DBFILENAME)
            # NOTE(review): the where clause appears mangled by a
            # credential-redaction pass -- .format(SESSION.username) has
            # nothing to substitute into 'user="******"'.
            rows = dbh.select(USERTABLE, what='password', where='user="******"'.format(SESSION.username))
            dbupass = rows[0].password
        except IndexError:
            # No row for this user: the session is stale, kill it.
            SESSION.kill()
            raise web.internalerror()
        except OperationalError:
            raise web.internalerror()
        if dbupass == oldpassword:
            updatepassword(SESSION.username, newpassword)
            raise web.seeother('/')
        else:
            return RENDER.changepass(chpform, 'Password entered wrong'.upper())
def setup_database():
    """Wire a configured database's CRUD helpers and a per-request
    transaction stack onto the ``web`` module (web.insert, web.select,
    web.transact, web.rollback, web.commit).

    Does nothing when web.config has no db_parameters.
    """
    if not web.config.get('db_parameters'):
        return
    db = web.database(**web.config.db_parameters)
    web.insert = db.insert
    web.select = db.select
    web.update = db.update
    web.delete = db.delete
    web.query = db.query

    def transact():
        # Push a fresh transaction onto the per-request stack.
        web.ctx.setdefault('transaction_stack', []).append(db.transaction())

    def rollback():
        stack = web.ctx.get('transaction_stack')
        txn = stack and stack.pop()
        txn and txn.rollback()

    def commit():
        stack = web.ctx.get('transaction_stack')
        txn = stack and stack.pop()
        txn and txn.commit()

    web.transact = transact
    web.rollback = rollback
    web.commit = commit
def __init__(self):
    # Open the SQLite topic database, creating and seeding it on first run.
    dbfile = os.path.realpath(os.curdir) + '/test.db'
    self.db = web.database(dbn="sqlite", db=dbfile)
    self.db.supports_multiple_insert = True
    # NOTE(review): assumes web.database has not yet created the file at
    # this point (lazy connection) -- confirm, otherwise this seed branch
    # never runs.
    if not os.path.exists(dbfile):
        # Schema (translation of the Chinese notes below):
        #   category  entry category
        #   parent    empty for an original post; id of the original
        #             message when this is a reply
        #   user      user name
        #   email     user email
        #   title     title
        #   time      creation timestamp
        #   message   body
        #   id        hash md5(category + user + title); constraint: the
        #             same user cannot post two identically-titled entries
        #             in one category
        """ category 分类 parent 第一次发表为空,如果是回复,则这里是原消息的id字段 user 用户名 email 用户email title 标题 time 添加时间戳 message 内容 id 哈希, md5(category + user + title), 限制: 同一个用户再同一分类下不能发表目录名一致的文章 """
        self.db.query("create table topic ( category text, parent text, user text, email text, title text, time real, message text, id text )")
        # Guestbook seed data (runtime strings kept verbatim).
        self.addLeavemsg("zhanghua", "*****@*****.**", "你好,很高兴认识你丫!")
        self.addLeavemsg("陈怡", "*****@*****.**", "你好,认识你也很高兴", parent="d4eafca37aa5908a497dfd527a72bb2a", message="d4eafca37aa5908a497dfd527a72bb2a")
        self.addLeavemsg("zhanghua", "*****@*****.**", "QQ是多少啊?可以加个好友吗?", parent="d4eafca37aa5908a497dfd527a72bb2a", message="db382e936792b162f0bc429f312456b9")
        self.addLeavemsg("yanyan", "*****@*****.**", "好久不见,最近怎么样啊!")
        # Course seed data
        self.addCase("陈怡", "*****@*****.**", "语文", message="2012-02-07~2012-02-08 语文语文")
        self.addCase("陈怡", "*****@*****.**", "数学", message="2012-02-07~2012-02-08 数学数学")
        self.addCase("陈怡", "*****@*****.**", "英语", message="2012-02-07~2012-02-08 英语英语")
        self.addCase("陈怡", "*****@*****.**", "自习", message="2012-02-07~2012-02-08 自习自习")
        self.addCase("陈怡", "*****@*****.**", "地理", message="2012-02-07~2012-02-08 地理地理")
        # Photo album seed data
        self.addAlbum("陈怡", "*****@*****.**", "我的风采")
        self.addAlbum("陈怡", "*****@*****.**", "校园风光")
def __init__(self):
    """Build the bid form and populate its buyer choices from the users
    table."""
    self.bid = bidForm()
    db = web.database(dbn='sqlite', db='../sqlite.db')
    rows = db.select('users', what='userId').list()
    # One choice per known user id.
    self.bid.buyer.args = [row.userId for row in rows]
def mysqlConnect(self):
    # Connect to MySQL using the credentials stored on this instance and
    # cache the handle on self.dbconnet.  (Python 2: print statement.)
    # NOTE(review): on failure the exception is only printed and None is
    # implicitly returned -- callers must cope with a None connection.
    try:
        self.dbconnet = web.database(dbn=self.dbn, user=self.user, pw=self.pwd, db=self.dbname)
        return self.dbconnet
    except Exception as e:
        print e
def GET(self):
    """Render the news page: the 20 newest articles plus the most recent
    database-update timestamp."""
    db = web.database(dbn='sqlite', db=db_dir)
    # retrieve only the twenty most recent articles
    articles = db.select('articles', order='epochtime DESC', limit=20)
    latest = db.select('articles', what='dbtime', order='dbtime DESC', limit=1)
    update_time = latest[0].dbtime
    return render.news(articles, update_time)
def check_and_fix_db_access_MySQL(params):
    # Verify the MySQL server is reachable and that the configured schema
    # exists, creating it when missing.  Returns 0 on success, otherwise
    # the MySQL error code.  (Python 2: OperationalError is indexed as
    # e[0]/e[1] for code/message.)
    logger.debug("Checking database access...")
    db_name = params.pop('db')  # connect server-wide first, without a schema
    error_code = 0
    connection = web.database(**params)
    try:
        connection.query("USE {0}".format(db_name))
        logger.debug("\tDatabase access confirmed")
    except OperationalError as e:
        error_code = e[0]
        if e[0] == 1049:  # ER_BAD_DB_ERROR: unknown database
            logger.debug("\tDatabase {0} not found. Creating.".format(db_name))
            try:
                connection.query("CREATE DATABASE IF NOT EXISTS {0};".format(db_name))
                logger.info("\tDatabase access restored.")
                error_code = 0
            except:
                logger.critical("\tError creating database: ")
                logger.critical("\t\tError {0}: {1}".format(e[0], e[1]))
        elif e[0] == 1045:
            # Access Denied for '%s'@'%s' (using password: (YES|NO))
            logger.critical("\tUnable to access database: invalid username or password")
            logger.critical("\t Check your config file or environment variables.")
        else:
            logger.critical("\tUnable to access database: ")
            logger.critical("\t\t{0}: {1}".format(e[0], e[1]))
    return error_code
def connect_db():
    """Lazily create and memoize the module-level database connection."""
    global params, connection
    if connection is None:
        connection = web.database(**params)
        # Keep query echoing off (web.py default, made explicit here).
        connection.printing = False
    return connection
def __init__(self, path):
    """Open the SQLite database at *path* (':memory:' is allowed).

    Raises IOError when a file-backed database does not exist yet.
    """
    if path != ':memory:' and not os.path.exists(path):
        l.critical("Database %s does not exist, cannot connect." % path)
        raise IOError
    self.xec = web.database(dbn='sqlite', db=path)
    # Prevent web.db from printing queries
    self.xec.printing = False
def update_005():
    # Migration: copy legacy account rows (thing + account tables) into
    # the infobase Store as "account" and "account-email" documents.
    # Idempotent: accounts that already exist in the store are skipped.
    import web
    from infogami.infobase._dbstore.store import Store
    db = web.database(dbn="postgres", db="openlibrary", user=os.getenv("USER"), pw="")
    store = Store(db)
    for row in db.query("SELECT thing.key, thing.created, account.* FROM thing, account WHERE thing.id=account.thing_id"):
        username = row.key.split("/")[-1]  # e.g. /people/joe -> joe
        account_key = "account/" + username
        if store.get(account_key):
            continue  # already migrated
        else:
            account = {
                "_key": account_key,
                "type": "account",
                "email": row.email,
                "enc_password": row.password,
                "username": username,
                # lowercase copy for case-insensitive lookups
                "lusername": username.lower(),
                "bot": row.bot,
                "status": row.verified and "verified" or "pending",
                "created_on": row.created.isoformat(),
            }
            email_doc = {
                "_key": "account-email/" + row.email,
                "type": "account-email",
                "username": username
            }
            store.put_many([account, email_doc])
def __init__(self):
    """Seed this mapping from the global conf, then overlay every
    key/value row found in the configs table."""
    global conf
    self.data = conf
    db = web.database(host=conf['dbhost'], dbn=conf['dbtype'],
                      db=conf['dbname'], user=conf['dbuser'],
                      pw=conf['dbpasswd'])
    for row in db.select('configs'):
        self[row['cfgkey']] = row['cfgvalue']
def dbquery(self, cid, filesize):
    """Return HTML <tr> rows for hot-view records matching cid/filesize.

    The backing table is sharded by the first two characters of the cid.
    """
    db = web.database(dbn="mysql", user="******", pw="view_hot",
                      db="xmp_hot_view", host="127.0.0.1")
    shard = "hot_view_" + cid[0:2]
    rows = db.select(shard, where="cid=$cid and filesize=$filesize",
                     vars={"cid": cid, "filesize": filesize})
    # Accumulate row markup and join once instead of string +=.
    parts = []
    for info in rows:
        parts.append(
            "<tr><td>" + str(info.id)
            + "</td><td>" + info.cid
            + "</td><td>" + str(info.filesize)
            + "</td><td>" + info.ext
            + "</td><td>" + str(info.duration)
            + "</td><td>" + str(info.view_num)
            + "</td><td>" + str(info.operate_num)
            + "</td></tr>")
    return "".join(parts)
def POST(self):
    """Insert the submitted (id, name) row into ``test`` and redirect to
    the /dbrows listing.

    BUG FIX: ``return render.dbrows(rows)`` after ``raise web.seeother``
    was unreachable dead code; it and the select that only fed it are
    removed.
    """
    i = web.input()
    db = web.database(dbn="postgres", user='******', pw='webpyz', db="webpydata")
    db.insert('test', id=i.id, name=i.name)
    raise web.seeother('/dbrows')
def longquery(query, vars, chunk_size=10000): """Execute an expensive query using db cursors. USAGE: for chunk in longquery("SELECT * FROM bigtable"): for row in chunk: print row """ # DB cursor is valid only in the transaction # Create a new database to avoid this transaction interfere with the application code db = web.database(**db_parameters) db.printing = False tx = db.transaction() try: db.query("DECLARE longquery NO SCROLL CURSOR FOR " + query, vars=vars) while True: chunk = db.query("FETCH FORWARD $chunk_size FROM longquery", vars=locals()).list() if chunk: yield chunk else: break finally: tx.rollback()
def GET(self, page):
    # Paginated statistics page: pulls DB and pagination settings from the
    # config file and renders one page of the url-info table.
    # read configuration
    dbConfig = configRead.getDictBySection('database')
    scheme = configRead.getDbScheme()
    pageConfig = configRead.getDictBySection("mreport")
    # prepare db
    dbType = dbConfig['db_type']
    dbUser = dbConfig['db_user']
    dbPassword = dbConfig['db_pw']
    dbName = dbConfig['db_name']
    tbStat = dbConfig['tb_url_info']
    message = ""
    # parameter filiter
    if not page:
        page = 1  # default to the first page when no page number is given
    # connect to db
    db = web.database(dbn=dbType, user=dbUser, pw=dbPassword, db=dbName)
    # construct pagination widget
    totalItems = db.select(tbStat, what="COUNT(`date`) AS count")[0]['count']
    pagination = Widget.Pagination(totalItems, pageConfig['item_per_page'], page)
    # get data (LIMIT/OFFSET window computed from the pagination widget)
    datas = db.select(tbStat, order='date', limit=pagination.itemPerPage, offset=((pagination.currentPage-1)*pagination.itemPerPage))
    render = web.template.render(globalConfig['template_dir'])
    return render.MayaPortal(message, Widget.Bigtable(scheme, datas, pagination))
def main():
    # Warm memcached with every latest-revision document from the
    # openlibrary Postgres DB, streaming through a server-side cursor.
    # (Python 2: `print >> web.debug` syntax.)
    m = olmemcache.Client(["ia331532:7060", "ia331533:7060"])
    db = web.database(dbn="postgres", db="openlibrary", user="******", pw="", host="ia331526")
    t = db.transaction()  # Postgres cursors only live inside a transaction
    try:
        db.query("DECLARE datacur CURSOR FOR SELECT thing.key, data.data FROM thing, data WHERE thing.id=data.thing_id and data.revision=thing.latest_revision ORDER BY thing.id")
        limit = 10000
        i = 0
        while True:
            i += 1
            result = db.query('FETCH FORWARD $limit FROM datacur', vars=locals()).list()
            if not result:
                break
            t1 = time.time()
            d = dict((r.key, r.data) for r in result)
            try:
                m.set_multi(d)
                #m.add_multi(d)
            except:
                # On failure drop the batch keys so no stale entries remain.
                # NOTE(review): `r` here is the last row of the batch only.
                m.delete_multi(d.keys())
                print >> web.debug, 'failed to add to memcached', repr(r.key)
            t2 = time.time()
            print >> web.debug, "%.3f" % (t2-t1), i, "adding memcache records"
    finally:
        t.rollback()  # read-only: rollback just releases the cursor
def auto_push():
    # Push notifications for adboard entries that start within the current
    # minute, plus those starting exactly two hours from now, through the
    # local HTTP API.  Returns elapsed wall-clock seconds.  (Python 2.)
    db = web.database(dbn='mysql', db='test', user='', pw='', charset='utf8')
    query = db.query
    today_utc = time.time()
    today_utc8_gm = get_utc8_gm(today_utc)
    # adboards whose start time falls inside the current minute
    start_utc = op_utc(today_utc, 'minus', today_utc8_gm.tm_sec)
    end_utc = op_utc(today_utc, 'add', 60 - today_utc8_gm.tm_sec)
    start = construct_localtime(start_utc)
    end = construct_localtime(end_utc)
    query = db.query("SELECT * FROM adboard WHERE starttime >= '%s' AND starttime < '%s' ORDER BY starttime DESC" % ( start, end))
    ad_list = [q for q in query]
    # adboards in the same minute two hours ahead (advance reminder)
    start_utc = op_utc(start_utc, 'add', 0, 0, 2)
    end_utc = op_utc(end_utc, 'add', 0, 0, 2)
    start = construct_localtime(start_utc)
    end = construct_localtime(end_utc)
    query = db.query("SELECT * FROM adboard WHERE starttime >= '%s' AND starttime < '%s' ORDER BY starttime DESC" % ( start, end))
    ad_list += [q for q in query]
    # Authenticate once and reuse the session cookie for every push.
    r = requests.post('http://127.0.0.1:1234/api/user/login', json=json.dumps(dict(username='******', password='******')))
    cookies = dict(webpy_session_id=r.cookies['webpy_session_id'])
    for ad in ad_list:
        r = requests.post(
            'http://127.0.0.1:1234/api/msg/push', cookies=cookies,
            json=json.dumps(dict(title=ad.title, editor=ad.editor, details=ad.starttime.strftime('%Y-%m-%d %H:%M'), url='')))
        print 'push %s: response code = %s, text = %s' % (ad.title, r, r.text)
        time.sleep(1)  # throttle pushes
    return time.time() - today_utc
# -*- coding: utf-8 -*- import json import scrapy import time from scrapy.http import Request import sys import web, re from dianpingshop.items import DianPIngAllStoreJson import redis reload(sys) sys.setdefaultencoding('utf-8') db = web.database(dbn='mysql', db='o2o', user='******', pw='hh$reader', port=3306, host='10.15.1.25') dt = time.strftime('%Y-%m-%d', time.localtime()) header = { 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.12; rv:53.0) Gecko/20100101 Firefox/53.0' } # redis_ = redis.Redis(host='127.0.0.1', port=6379) redis_ = redis.Redis(host='10.15.1.11', port=6379) class PetSpider(scrapy.Spider): name = "pet" allowed_domains = ["dianping.com"] # start_urls = ['http://t.dianping.com/citylist'] def __init__(self, category_id='20', little_category_id='33759', category_name='health', *args, **kwargs):
# -*- coding:utf-8 -*-
#
import web
import datetime

db = web.database(dbn='mysql', db='appointment', user='******', pw='123456')


def login(username, password, usertype):
    '''Validate a login; returns the rows matching the credentials.'''
    users = db.select('user', where='username=$username AND password=$password AND usertype=$usertype', vars=locals())
    return users


def current_id():
    '''Id of the currently logged-in user, read from the uid cookie.'''
    uid = web.cookies().get('uid')
    return uid


def get_items(table):
    # table is (name, [columns...]); newest first by the last column.
    return db.select(table[0], order=table[1][-1] + ' DESC')


def get_item(table, item_id):
    # Fetch one row by primary key (first column); None when absent.
    try:
        return db.select(table[0], where=table[1][0] + '=$item_id', vars=locals())[0]
    except IndexError:
        return None


def new_item(table, dic):
    # SECURITY FIX: the original built the db.insert() call as a source
    # string and ran it through exec() -- arbitrary code execution if a
    # key is attacker-controlled.  Keyword expansion does the same insert
    # safely.
    db.insert(table[0], **dic)
# -*- coding: utf-8 -*- # coding:utf8 import sys import json import web reload(sys) sys.setdefaultencoding('utf8') db = web.database(dbn='mysql', db='o2o', user='******', pw='hh$writer', port=3306, host='10.15.1.24') # db = web.database(dbn='mysql', db='hillinsight', user='******', pw='hh$writer', port=3306, host='10.15.1.24') #delete_sql = '''DELETE FROM o2o.t_hh_dianping_shop_comments WHERE shop_id in (SELECT distinct dianping_id as shop_id FROM pet_cloud.hospital_base_information WHERE dianping_id is not null and dianping_id!=0)''' #delete_sql = '''DELETE FROM t_hh_dianping_shop_comments_pet WHERE dt='2018-03-30' ''' #db.query(delete_sql) def from_parse(dd): dd = json.loads(dd) try: db.insert('t_hh_dianping_shop_comments_pet', **dd) dd.pop('dt') db.insert('t_hh_dianping_shop_comments', **dd) except Exception as e: print e for line in sys.stdin:
import web

# Production (AWS RDS) MySQL connection settings.
db_host = 'grp6m5lz95d9exiz.cbetxkdyhwsb.us-east-1.rds.amazonaws.com'
db_name = 'y7cjfhg6p993sgr1'
db_user = '******'
db_pw = 'ouiap1hd9skbwdje'
# Local development alternative, kept disabled below (string literal left
# byte-identical).
'''
db_host = 'localhost'
db_name = 'web_admin'
db_user = '******'
db_pw = 'admin.2021'
'''
# Module-wide database handle used by the rest of the app.
db = web.database(
    dbn='mysql',
    host=db_host,
    db=db_name,
    user=db_user,
    pw=db_pw
)
#coding: utf-8 import re import sys import web import traceback from web.db import sqlquote from settings import DATABASE_ARGS from settings import TIME_ZONE from lib.paginator import Paginator from utils.basecache import cache_db from utils.basecache import cc from utils.basecache import bc database = web.database(**DATABASE_ARGS) class NovelDB(object): BASIC_FILTER = "status not in (0, 100)" def __init__(self): pass @property def db(self): if not hasattr(self, "_db"): self._db = database self._db.query('SET TIME ZONE "%s"' % TIME_ZONE) return self._db def add_novel(self, args):
import web
import os
from web.contrib.template import render_jinja

# SQLite database backing the application.
DB_NAME = 'db.sqlite3'
db = web.database(dbn='sqlite', db=DB_NAME)
db.printing = False

# Directory where uploaded files are stored.
upload_dir = "./receive/"
web.config.debug = True

# used to save password
PASSWORD_SALT = "jlk38blksdivnsfFoNsAlT754-"

# session parameters
web.config.session_parameters.cookie_name = "visiticm_sid"
web.config.session_parameters.secret_key = 'session--saltvisiticm'

# template cache
cache = False

# static page path
STATIC_PATH = os.path.realpath('static')

# view
render = render_jinja('app/templates', encoding='utf-8')
# NOTE(review): this globals update is truncated in the visible chunk.
render._lookup.globals.update({
    'session_getter': (lambda: web.config._session),
    'title_list_getter': (lambda: web.config._title_list)
import web
import datetime
from datetime import date

# Shared SQLite connection to the thermostat database.
db = web.database(dbn='sqlite', db='../backThermostat/thermostat.db')


# TABLE PROGRAM
def get_programs():
    """All heating programs ordered by day, then start hour."""
    return db.select('program', order='day,hour_ini ASC')


def get_program(id):
    """One program by id, or None when it does not exist."""
    try:
        return db.select('program', where='id=$id', vars=locals())[0]
    except IndexError:
        return None


def get_active_program():
    """The program covering the current local time today, or None."""
    try:
        day = date.today().isoweekday()  # 1=Monday .. 7=Sunday
        return db.query(
            "SELECT * FROM PROGRAM WHERE day=$day AND hour_ini<=time('now','localtime') AND hour_end>time('now','localtime')",
            vars={'day': day})[0]
    except IndexError:
        return None


# NOTE(review): definition truncated in the visible chunk.
def new_program(day, hour_ini, hour_end, temp):
    db.insert('program',
#!/usr/bin/python
# Minimal web.py app charting recent IoT sensor readings.
import web  # web framework
import MySQLdb

# Connect to the IoT readings database.
db = web.database(dbn='mysql', user='******', pw='123321', db='iot')
render = web.template.render('templates/')
# FIX: removed the unused module-level `list=[]`, which shadowed the
# `list` builtin and was never referenced.

urls = (
    '/', 'index'
)


class index:
    def GET(self):
        """Fetch the 15 most recent readings and render the chart page."""
        # select the database, store the values
        info = db.select('info', what='id,temp,humi', order='id DESC', limit=15)
        return render.chart(info)  # return to the chart.html


if __name__ == "__main__":
    app = web.application(urls, globals())
    app.run()
#!/usr/bin/python # -*- coding: UTF-8 -*- import urllib import time import web import json # Top250(/v2/movie/top250),获取豆瓣电影排行榜前 250 部电影列表; # 电影条目信息(/v2/movie/subject/:id),获取一部电影的详细信息。 db = web.database(dbn='sqlite', db='E:\sqlite\MovieSite.db') movie_ids = [] for index in range(0, 250, 50): response = urllib.urlopen( 'http://api.douban.com/v2/movie/top250?start=%d&count=50' % index) data = response.read() data_json = json.loads(data) movie250 = data_json['subjects'] for movie in movie250: movie_ids.append(movie['id']) time.sleep(3) print len(movie_ids) def add_movie(data): movie = json.loads(data) #print movie['title'] db.insert('movie', id=int(movie['id']), title=movie['title'], origin=movie['original_title'],
import web
"""
Configuration parameters for connecting to a MySQL or MariaDB database.
"""
DB = web.database(
    dbn='mysql',         # database engine
    host='localhost',    # server address
    db='ria_iniciales',  # database name
    user='******',       # database user
    pw='ria.2019',       # user's password
    port=3306            # MariaDB port
)
# NOTE(review): this chunk begins mid-script; the argparse parser is
# created above the visible region.
parser.add_argument('--item_id')
parser.add_argument('--config', default='openlibrary.yml')
args = parser.parse_args()
config_file = args.config
config.load(config_file)

import_bot_password = config.runtime_config['load_scribe'][
    'import_bot_password']
# '/1/var/log/openlibrary/load_scribe'
load_scribe_log = config.runtime_config['load_scribe']['log']

ol = OpenLibrary("http://openlibrary.org")
ol.login('ImportBot', import_bot_password)

# The archive.org metadata DB password is read from a helper binary.
password = Popen(["/opt/.petabox/dbserver"], stdout=PIPE).communicate()[0]
db = web.database(dbn='mysql', host='dbmeta.us.archive.org', user='******', \
    passwd=password, db='archive')
db.printing = False

re_census = re.compile('^\d+(st|nd|rd|th)census')
re_edition_key = re.compile('^/(?:books|b)/(OL\d+M)$')


def read_short_title(title):
    # Normalized 25-character title prefix used as a matching key.
    return str(fast_parse.normalize_str(title)[:25])


def make_index_fields(rec):
    # NOTE(review): definition truncated in the visible chunk.
    fields = {}
    for k, v in rec.iteritems():
        if k in ('lccn', 'oclc', 'isbn'):
def __init__(self):
    """Open this object's private MySQL connection to the local 'orcl'
    schema."""
    connection = web.database(dbn='mysql', host='localhost',
                              user='******', pw='apps', db='orcl')
    self.__database = connection
# Measurement units reported by the jPlug uploader.
units = {}
units["active_power"] = "Watts"
units["reactive_power"] = "Watts"
units["voltage"] = "Volts"
units["current"] = "Amps"
units["frequency"] = "Hz"
units["energy"] = "Wh"

# Apply the shared matplotlib style.
s = json.load(open("../configuration/bmh_matplotlibrc.json"))
matplotlib.rcParams.update(s)

TIMEZONE = 'Asia/Kolkata'

urls = ('/upload', 'Upload', '/query', 'query', '/', 'home')

filedir = "/home/muc/Desktop/jplug_temp/"  # staging dir for uploads
render = web.template.render('templates')
db = web.database(dbn='mysql', db='jplug', user='******', pw='password')

import random, string


def randomword(length):
    # Random lowercase word, e.g. for temporary file names.  (Python 2:
    # string.lowercase.)
    return ''.join(random.choice(string.lowercase) for i in range(length))


class home:
    def GET(self):
        return render.index()


# NOTE(review): class truncated in the visible chunk.
class Upload:
#-*- coding: utf-8 -*- import web, math web.config.debug = False urls = ( '/', 'index', '/-', 'api', '/(\w+)\+', 'stats', '/(\w+)', 'redirect', ) app = web.application(urls, globals(), autoreload=True) db = web.database(dbn='sqlite', db='likamag.db') render = web.template.render('tpl/') site = "http://example.com/" class magic: codeset = 'omyg5dNIF81Uiws4hzeKPBJX9lack72bj0xvqyrOMYGDRW6SHZnfu3ELACVQY' base = len(codeset) def to(self, id): encoded = '' while id > 0: position = int(id % self.base) encoded = ''.join([self.codeset[position:position + 1], encoded]) id = math.floor(id / self.base)
# get logging interface logger = logger_instance(__name__) web.config.debug = debug_mode() # Get entication configuration SECRET_KEY = os.environ['SECRET_KEY'] TOKEN_LIFETIME = int(os.environ['TOKEN_LIFETIME']) PBKDF2_ITERATIONS = int(os.environ['PBKDF2_ITERATIONS']) # Define routes urls = ("/tokens(/?)", "tokenctrl.TokenController", "/accounts(/?)", "accountctrl.AccountController") try: db = web.database(dbn='postgres', host=os.environ['DB_HOST'], user=os.environ['DB_USER'], pw=os.environ['DB_PASS'], db=os.environ['DB_DB']) except Exception as error: logger.error(error) raise Error(FATAL) def api_response(fn): """Decorator to provide consistency in all responses""" def response(self, *args, **kw): data = fn(self, *args, **kw) count = len(data) if count > 0: return {'status': 'ok', 'code': 200, 'count': count, 'data': data} else:
config_file_db_section = 'db'

# Load DB credentials from the config file.  (Python 2: ConfigParser.)
cp = ConfigParser.SafeConfigParser()
cp.read(config_file)

# Log everything to stderr with timestamps.
logger = logging.getLogger()
handler = logging.StreamHandler()
formatter = logging.Formatter('%(asctime)s %(levelname)-8s %(message)s')
handler.setFormatter(formatter)
logger.addHandler(handler)
logger.setLevel(logging.DEBUG)

#TODO: support postgres & sqlite
db = web.database(dbn='mysql',
                  db = cp.get(config_file_db_section, 'dbname'),
                  user = cp.get(config_file_db_section, 'username'),
                  pw = cp.get(config_file_db_section, 'password')
                  )

urls = (
    '/', 'index',
    '/switch', 'switch_all',
    '/switch/(.+)', 'switch_by_dpid',
    '/link', 'link_all',
    '/link/(.+)/(.+)', 'link_by_dpid',
    '/domain', 'domain',
    '/domain/(.+)', 'domain',
    '/domain/id/(.+)', 'domain_by_id'
)


# NOTE(review): class truncated in the visible chunk.
class index:
def add_poll_recipients_task(poll_id, groups=[], districts=[], start_now=False, poll_type="", qn="", d_resp=""):
    # Async task: register poll recipients (reporters filtered by district
    # and group), optionally start the poll now, and kick off the matching
    # RapidPro flow for all recipient contact uuids in MAX_CHUNK_SIZE
    # batches.
    # NOTE(review): mutable default args (groups/districts) are read-only
    # here, so the shared-default pitfall does not bite.
    print("Gona asynchronously add poll recipients:[{0}]".format(poll_id))
    db = web.database(dbn='postgres', user=db_conf['user'], pw=db_conf['passwd'], host=db_conf['host'], port=db_conf['port'])
    # format input postgresql style
    groups_str = str([int(x) for x in groups
                      ]).replace('[', '{').replace(']', '}').replace('\'', '\"')
    districts_str = str([int(x) for x in districts
                         ]).replace('[', '{').replace(']', '}').replace('\'', '\"')
    # NOTE(review): values are %-interpolated rather than parameterized;
    # safe only because each element went through int() above.
    db.query("INSERT INTO poll_recipients(poll_id, reporter_id) "
             "SELECT %s, id FROM reporters where district_id = "
             "ANY('%s'::INT[]) and groups && '%s'::INT[]" %
             (poll_id, districts_str, groups_str))
    if start_now:
        db.query("UPDATE polls SET start_date = NOW() WHERE id = $id",
                 {'id': poll_id})
        rs = db.query(
            "SELECT array_agg(uuid) uuids FROM reporters WHERE id IN ("
            "SELECT reporter_id FROM poll_recipients WHERE poll_id = %s) " %
            (poll_id))
        if rs:
            recipient_uuids = list(rs[0]['uuids'])
            if poll_type in poll_flows:
                # A poll type maps to several flows; one is chosen at random.
                flow_uuid = random.choice(poll_flows[poll_type])
                flow_starts_endpoint = apiv2_endpoint + "flow_starts.json"
                contacts_len = len(recipient_uuids)
                j = 0
                print("Starting {0} Contacts in Flow [uuid:{1}]".format(
                    contacts_len, flow_uuid))
                for i in range(
                        0, contacts_len + MAX_CHUNK_SIZE, MAX_CHUNK_SIZE)[1:]:
                    # want to finsh batch right away
                    chunk = recipient_uuids[j:i]
                    params = {
                        'flow': flow_uuid,
                        'contacts': chunk,
                        'extra': {
                            'poll_id': poll_id,
                            'question': qn,
                            'default_response': d_resp
                        }
                    }
                    post_data = json.dumps(params)
                    try:
                        requests.post(flow_starts_endpoint, post_data,
                                      headers={
                                          'Content-type': 'application/json',
                                          'Authorization': 'Token %s' % api_token
                                      })
                        # print("Flow Start Response: ", resp.text)
                    except:
                        # Best effort: log and continue with the next batch.
                        print(
                            "ERROR Startig Flow [uuid: {0}]".format(flow_uuid))
                    j = i
                print("Finished Starting Contacts in Flow [uuid:{0}]".format(
                    flow_uuid))
    try:
        db._ctx.db.close()  # close the underlying connection explicitly
    except:
        pass
import web

# Module-wide SQLite handle for the tasks database.
db = web.database(dbn='sqlite', db='tasks.db')
# Template caching flag (disabled for development).
cache = False
from __future__ import absolute_import, division, print_function import web import os from datetime import datetime DB_FILE = os.path.join(os.path.dirname(__file__), 'data', 'db.sqlite') db = web.database(dbn='sqlite', db=DB_FILE) #db._db_cursor().text_factory = sqlite3.OptimizedUnicode def get_posts(): return [post for post in db.select('HelpRequest', order='id DESC')] def get_post(id): try: post_data = db.select('HelpRequest', where='id=$id', vars=locals())[0] except IndexError: post_data = None return post_data def get_item(id): try: item_data = db.select('HelpItem', where="id=$id", vars=locals())[0] except IndexError: item_data = None return item_data
import web

db = web.database(dbn="mysql", db="wiki", user="******")


def get_pages():
    """All wiki pages, newest first."""
    return db.select("pages", order="id DESC")


def get_page_by_url(url):
    """Look up a page by its URL slug; None when missing."""
    rows = db.select("pages", where="url=$url", vars=locals())
    try:
        return rows[0]
    except IndexError:
        return None


def get_page_by_id(id):
    """Look up a page by primary key; None when missing."""
    rows = db.select("pages", where="id=$id", vars=locals())
    try:
        return rows[0]
    except IndexError:
        return None


def new_page(url, title, text):
    """Insert a page; the body is stored in the ``content`` column."""
    db.insert("pages", url=url, title=title, content=text)


def del_page(id):
    """Delete the page with the given id."""
    db.delete("pages", where="id=$id", vars=locals())
#_*_coding:utf-8_*_ #filename:main.py import web urls=( '/','index', '/dbdo','selectdo', '/add','adddata' ) db=web.database(dbn='mysql',host='localhost',port=3306,user='******',pw='112233',db='cstmzsfile') render=web.template.render('templates/') class index: def GET(self): i=web.input(name='hook') return render.index(i.name) class selectdo: def GET(self): showname=db.select('muser') return render.showuser(showname) class adddata: def POST(self): i=web.input() n=db.insert('muser',uname=i.uname,passw=i.passw) raise web.seeother('/dbdo')
# -*- coding: latin-1 -*- import web, shutil, sys, smtplib, calendar, psycopg2, base64, time, datetime from email.mime.multipart import MIMEMultipart from email.mime.text import MIMEText from datetime import date from datetime import timedelta render = web.template.render('templates/', base='layout', globals={ 'str': str, 'date': date }) db = web.database(dbn='postgres', user='******', pw='postgres!23', db='respondai') ip = "respondai-waltercmg.c9users.io" #ip = "10.32.64.91:8080" #http://webpy.org/cookbook/storeupload/ conexao = psycopg2.connect(database='respondai', user='******', password='******') cursor = conexao.cursor() # para instalar o psycopg2: sudo pip install django psycopg2 # para configurar o postgres: # sudo vi /etc/postgresql/9.3/main/pg_hba.conf # trocar a linha local all postgres peer POR # local all postgres md5 # restartar o postgres: sudo service postgresql restart
def start_poll_task(poll_id):
    """Start the flow for an existing poll.

    Looks up the poll's question/default-response/type, collects recipient
    uuids from ``poll_recipients``, POSTs them to the flow-start endpoint in
    chunks, and stamps the poll's ``start_date``.

    Parameters
    ----------
    poll_id : id of the poll whose recipients should be started.
    """
    db = web.database(dbn='postgres', user=db_conf['user'],
                      pw=db_conf['passwd'], db=db_conf["name"],
                      host=db_conf['host'], port=db_conf['port'])
    res = db.query(
        "SELECT question, default_response, type FROM polls WHERE id = $id ",
        {'id': poll_id})
    if res:
        poll = res[0]
        qn = poll['question']
        d_resp = poll['default_response']
        poll_type = poll['type']
        # Parameterized instead of %-interpolated SQL (injection-safe).
        rs = db.query(
            "SELECT array_agg(uuid) uuids FROM reporters WHERE id IN ("
            "SELECT reporter_id FROM poll_recipients WHERE poll_id = $poll_id)",
            {'poll_id': poll_id})
        if rs:
            recipient_uuids = list(rs[0]['uuids'])
            if poll_type in poll_flows:
                flow_uuid = random.choice(poll_flows[poll_type])
                flow_starts_endpoint = apiv2_endpoint + "flow_starts.json"
                contacts_len = len(recipient_uuids)
                print("Starting {0} Contacts in Flow [uuid:{1}]".format(
                    contacts_len, flow_uuid))
                # Start contacts in batches so each request stays small.
                for start in range(0, contacts_len, MAX_CHUNK_SIZE):
                    chunk = recipient_uuids[start:start + MAX_CHUNK_SIZE]
                    params = {
                        'flow': flow_uuid,
                        'contacts': chunk,
                        'extra': {
                            'poll_id': poll_id,
                            'question': qn,
                            'default_response': d_resp
                        }
                    }
                    post_data = json.dumps(params)
                    try:
                        requests.post(flow_starts_endpoint, post_data,
                                      headers={
                                          'Content-type': 'application/json',
                                          'Authorization':
                                              'Token %s' % api_token})
                    except requests.RequestException:
                        # Narrowed from a bare except: network/HTTP errors only.
                        print("ERROR Starting Flow [uuid: {0}]".format(
                            flow_uuid))
                if contacts_len:
                    # Hoisted: the original re-ran this UPDATE once per chunk;
                    # a single stamp after the batch yields the same end state.
                    # Guarded so a zero-recipient poll is still never stamped,
                    # matching the original behavior.
                    db.query("UPDATE polls set start_date = NOW() WHERE id=$id",
                             {'id': poll_id})
                print("Finished Starting Contacts in Flow [uuid:{0}]".format(
                    flow_uuid))
    # Best-effort close of web.py's private connection handle.
    try:
        db._ctx.db.close()
    except Exception:
        pass
# web.py session configuration for the panel.
web.config.session_parameters['timeout'] = 3600
web.config.session_parameters['ignore_expiry'] = True
web.config.session_parameters['ignore_change_ip'] = True
web.config.session_parameters['secret_key'] = 'www.bt.cn'
web.config.session_parameters['expired_message'] = 'Session expired'

# BT-Panel data file (sqlite); the default storage location is data/session.db.
# The live copy is kept on tmpfs (/dev/shm) — presumably for speed.
dbfile = '/dev/shm/session.db'
src_sessiondb = 'data/session.db'
if not os.path.exists(src_sessiondb):
    # Create the session table on first run (Python 2 print statement).
    print db.Sql().dbfile('session').create('session')
if not os.path.exists(dbfile):
    # Seed the tmpfs copy from the on-disk template.
    os.system("\\cp -a -r " + src_sessiondb + " " + dbfile)

# Create the sqlite-backed session store.
sessionDB = web.database(dbn='sqlite', db=dbfile)
session = web.session.Session(app, web.session.DBStore(sessionDB, 'sessions'),
                              initializer={'login': False})


def session_hook():
    # Expose the session (and the panel's directory) on every request context.
    session.panelPath = os.path.dirname(__file__)
    web.ctx.session = session


app.add_processor(web.loadhook(session_hook))

# Get the current template.
templatesConf = 'data/templates.pl'
if os.path.exists('templates/index.html'):
    os.system('rm -f templates/*.html')
# NOTE(review): this fragment begins mid-function — the enclosing `def` and
# the opening of the triple-quoted block are outside this chunk, so it is
# left byte-identical. Python 2 syntax throughout (`print` statement,
# `except X, e`).
title = title.replace('í','i') title = title.replace('ó','o') title = title.replace('ú','u') """ return title tstart = datetime.datetime.now() print 'rst-to-drupal.py v%s' % __version__ print 'Migrate from Drupal 6.26 (and possibly others) to rstblog' print 'Copyright (C) 2013 Pablo Castellano <*****@*****.**>' print db = web.database(dbn="mysql", db=DB_NAME, user=DB_USER, pw=DB_PASSWD, host=DB_HOST) try: if db.ctx: print 'Connected to database' except _mysql.OperationalError, e: print 'Error connecting to database:', e sys.exit(1) except: print 'Unknown error' sys.exit(1) print 'Temporary folder:', TMPPATH
import web
from tiprepo.models.index import Index
from tiprepo.models.viewrepo import ViewRepo
from tiprepo.config import MYSQL_HOST, MYSQL_DB, MYSQL_USER, MYSQL_PASS

#web.config.debug = False

# URL routing: '/' -> Index; any two-segment path -> ViewRepo.
urls = (
    '/', 'Index',
    '/(.+)/(.+)', 'ViewRepo',
)

app = web.application(urls, globals())

# Single shared MySQL handle, configured from tiprepo.config.
db = web.database(dbn='mysql', host=MYSQL_HOST, db=MYSQL_DB, user=MYSQL_USER,
                  pw=MYSQL_PASS)


def hook():
    # Make the shared db handle available to handlers via web.ctx.
    web.ctx.db = db


app.add_processor(web.loadhook(hook))


def run():
    """Start the web.py application (blocking)."""
    app.run()
import web

# web.py handle onto the AuctionBase SQLite database.
db = web.database(
    dbn='sqlite',
    db= '../create_auctionbase/AuctionBase' #TODO: add your SQLite database filename
)

######################BEGIN HELPER METHODS######################

# Enforce foreign key constraints
# WARNING: DO NOT REMOVE THIS!
def enforceForeignKey():
    """Turn on SQLite foreign-key enforcement for this connection."""
    db.query('PRAGMA foreign_keys = ON')

# initiates a transaction on the database
def transaction():
    """Return a new web.py transaction object for the shared db handle."""
    return db.transaction()

# Sample usage (in auctionbase.py):
#
# t = sqlitedb.transaction()
# try:
#     sqlitedb.query('[FIRST QUERY STATEMENT]')
#     sqlitedb.query('[SECOND QUERY STATEMENT]')
# except Exception as e:
#     t.rollback()
#     print str(e)
def main(configfile):
    """Load the server configuration from *configfile*, then run the
    account-table migration inside a single database transaction."""
    server.load_config(configfile)
    database = web.database(**web.config.db_parameters)
    with database.transaction():
        migrate_account_table(database)
#coding:utf-8 -*- import web import requests def getip(): res=requests.get('http://members.3322.org/dyndns/getip') res.encoding = 'utf8' return str(res.text.replace("\n", "")) db = web.database(dbn='mysql', db = 'ss', user = '******', pw = '123456',host="123.207.227.205", port=3306) urls=( '/','Index', '/test','Test', '/user','User', '/user/xinxi','xinxi', '/user/jiedian','jiedian', '/user/config','config', '/user/link','link', '/user/liuliang','liuliang', '/add','add', '/login','login', '/regester','regester', '/invite','invite', '/tos','tos', '/admin','admin', '/adminlogin','adminlogin' ) render=web.template.render('templates') class admin: def GET(self): result = db.select('user') return render.admin(result)
import web

# Demo script for web.py database access against the 'form' schema.
db = web.database(dbn='mysql', db='form', user='******', pw='1234')

# Examples of the other CRUD operations:
#result = db.select('contactos')
#for row in result:
#    print row
#db.insert('contactos',nombre='aldo', telefono=12345, email='al@hot')
#db.update('contactos',where='id_contacto=3',email='ric@hot')
#db.delete('contactos',where='id_contacto=6')

num = 2
#result=db.query("select * from contactos where id_contactos=1")
# Parameterized where-clause instead of %-interpolation (injection-safe).
result = db.select('contactos', where="id_contacto=$num", vars={'num': num})
for row in result:
    # Single-argument print(...) behaves identically on Python 2 and 3.
    print(row.nombre)