def connect_db(self, dbname=None):
    """Connect to an activity-code database and load the code lookup tables.

    Populates ``self.code_JobEnd``, ``code_MR``, ``code_Prod``, ``code_Wup``,
    ``code_Process`` and ``code_Maintenance`` from the ``activitycode`` table.

    :param dbname: path of the sqlite database; ``None`` opens an in-memory DB.
    :return: ``(connection, cursor)``
    """
    if dbname is None:
        # BUG FIX: the original used ':memory' (missing trailing colon), which
        # creates an on-disk file literally named ":memory" instead of the
        # special in-memory database.
        conn = lite.connect(':memory:')
    else:
        conn = lite.connect(dbname)
    c = conn.cursor()
    # JobEnd codes are fixed, not stored in the table.
    self.code_JobEnd = ['@97', '@96']
    # MR: include both MR and @95 (values bound as parameters).
    lines = c.execute("SELECT code FROM activitycode WHERE item IN (?, ?)",
                      ('MR', '@95'))
    self.code_MR = [item[0] for item in lines]
    # Production codes.
    lines = c.execute("SELECT code FROM activitycode WHERE item=?", ('Prod',))
    self.code_Prod = [item[0] for item in lines]
    # Wash-ups: map each code to +1/-1/0 depending on its OEE point.
    lines = c.execute("SELECT code, oeepoint FROM activitycode WHERE item=?",
                      ('W-up',))
    lookup = {'ON': 1, 'OFF': -1, '': 0}
    self.code_Wup = {code: lookup[point] for code, point in lines}
    # Production downtime, named as Process.
    pd = ('Plate', 'Cplate', 'Stock', 'Customer', 'Process', 'Org', 'Dry')
    marks = ','.join('?' * len(pd))
    lines = c.execute(
        "SELECT code, oeepoint FROM activitycode WHERE oeepoint IN (%s)" % marks,
        pd)
    self.code_Process = {code: point for code, point in lines}
    # Non-production downtime, named Maintenance for historical reasons.
    nonpd = ('Clean-up', 'Maintenance-I', 'Maintenance-H', 'Training',
             'Nowork', 'Breakdown', 'Other')
    marks = ','.join('?' * len(nonpd))
    lines = c.execute(
        "SELECT code, oeepoint FROM activitycode WHERE oeepoint IN (%s)" % marks,
        nonpd)
    self.code_Maintenance = {code: point for code, point in lines}
    return conn, c
def connect_trade_db(db_file):
    """Open the historical trade database.

    Returns a ``(cursor, connection, status)`` tuple where *status* is
    ``"old"`` for an existing database file and ``"new"`` when the file and
    its ``stock`` table had to be created first.
    """
    if os.path.isfile(db_file):
        cx = sqlite.connect(db_file)
        return (cx.cursor(), cx, "old")
    # First run: connecting creates the file, then add the schema.
    cx = sqlite.connect(db_file)
    cu = cx.cursor()
    cu.execute('''create table stock( id integer primary key, s_date varchar(50), s_open varchar(10), s_high varchar(10), s_low varchar(10), s_close varchar(10), s_volume INTEGER )''')
    return (cu, cx, "new")
def connect_db(db_file):
    """Open the database file and report its freshness.

    Returns ``(cursor, connection, status)``: ``"new"`` when the file and its
    ``stock`` table were just created, ``"old"`` when the existing file is at
    least one day stale, ``"nothing"`` when it is fresh.
    """
    if not os.path.isfile(db_file):
        # Create the file and its schema on first use.
        cx = sqlite.connect(db_file)
        cu = cx.cursor()
        cu.execute('''create table stock( id integer primary key, s_date varchar(50), s_open varchar(10), s_high varchar(10), s_low varchar(10), s_close varchar(10), s_volume INTEGER )''')
        return (cu, cx, "new")
    # Age in seconds since the file was last modified.
    age = int(time.time()) - int(os.stat(db_file).st_mtime)
    cx = sqlite.connect(db_file)
    cu = cx.cursor()
    if age >= 86400:
        return (cu, cx, "old")
    return (cu, cx, "nothing")
def main(argv):
    # Copy all tickets (plus their change history and attachments) from a
    # source Trac sqlite database into a destination one, reassigning the
    # component and clearing the milestone on each copied ticket.
    # *argv* must be exactly (source.db, target.db, new_component).
    # Returns 1 on bad usage; Python 2 script (print statements).
    try:
        source_file, dest_file, new_component = argv
    except ValueError:
        print "Usage: %s source.db target.db new_component" % os.path.basename(sys.argv[0])
        return 1
    # connect to databases
    source_conn = sqlite.connect(source_file)
    source_cur = source_conn.cursor()
    dest_conn = sqlite.connect(dest_file)
    dest_cur = dest_conn.cursor()
    # Build a "?,?,...,?" placeholder string matching a sequence's length.
    qmarks = lambda seq: ",".join(["?" for r in seq])
    try:
        # go through tickets in source
        tickets = source_cur.execute("SELECT * FROM ticket;")
        for ticket in tickets:
            # delete the id column - will get a new id
            old_id = ticket[0]
            ticket = list(ticket[1:])
            # reset values of component and milestone rows
            # (indices are 1-based column positions minus the dropped id column)
            ticket[4 - 1] = new_component  # component
            ticket[11 - 1] = None  # milestone
            # insert ticket into target db, naming the columns explicitly so
            # the destination assigns a fresh primary key
            print "copying ticket #%s" % old_id
            dest_cur.execute(
                "INSERT INTO ticket " +
                "(" + (",".join([f[0] for f in source_cur.description[1:]])) + ") " +
                "VALUES(" + qmarks(ticket) + ")",
                ticket,
            )
            new_id = dest_cur.lastrowid
            # parameters: table name, where clause, query params, id column index, table repr, row repr index
            def copy_table(table, whereq, params, id_idx, trepr, rrepr_idx):
                # Copy every row of *table* matching *whereq*, rewriting the
                # ticket-id column (at *id_idx*) to the freshly assigned id.
                cur = source_conn.cursor()
                try:
                    cur.execute("SELECT * FROM %s WHERE %s" % (table, whereq), params)
                    for row in cur:
                        row = list(row)
                        row[id_idx] = new_id
                        print "\tcopying %s #%s" % (trepr, row[rrepr_idx])
                        dest_cur.execute("INSERT INTO %s VALUES(%s)" % (table, qmarks(row)), row)
                finally:
                    cur.close()
            # copy ticket changes
            copy_table("ticket_change", "ticket=?", (old_id,), 0, "ticket change", 1)
            # copy attachments
            copy_table("attachment", 'type="ticket" AND id=?', (old_id,), 1, "attachment", 2)
        # commit changes
        dest_conn.commit()
    finally:
        dest_conn.close()
def main(argv=None):
    # Command-line entry point for the user-details store.
    # Creates the sqlite database on first use, then handles the "GET"
    # subcommand: print "entity content" for the requested username.
    # Python 2 script (print statements, old except syntax).
    if argv is None:
        argv = sys.argv
    if not os.path.exists(USERDETAILSDB):
        # Bootstrap the schema the first time around.
        con = sqlite.connect(USERDETAILSDB)
        con.execute("create table userdetails(username varchar(100), entity varchar(10), content varchar(100) )")
        con.close()
    con = sqlite.connect(USERDETAILSDB)
    if (len(argv) < 2):
        print "Not enough arguments"
        return -1
    if (argv[1] == "GET"):
        # returns "username password" in cleartext. CHANGE FOR LDAP
        # NOTE(review): argv[2] is read without checking len(argv) >= 3, so
        # "GET" with no username raises IndexError — confirm intended.
        username = argv[2]
        try:
            res = con.execute("select * from userdetails where username=(?)", (username, ))
            found = 0
            for row in res:
                found += 1
                print row[1], row[2]
            if (found == 0):
                print "NOT_FOUND"
        except sqlite.Error, e:
            # Report the database error and abort the process.
            print "ERROR ", e.args[0]
            sys.exit()
def SqliteConnectNoArch(filename):
    """Open *filename* with whichever sqlite binding is importable.

    Tries pysqlite2, then sqlite3, then the legacy sqlite module, returning
    the first connection obtained; returns None when no binding is available.
    Exits the process when the file does not exist.
    """
    # return either a sqlite3/2/1 connection
    if not os.path.exists(filename):
        logError("Could not find SQLite3 db file: %s" % filename)
        sys.exit(0)
    # BUG FIX: the original bare `except:` clauses also swallowed connection
    # errors (and even KeyboardInterrupt), silently falling through to the
    # next binding; only a missing module should trigger the fallback.
    try:
        from pysqlite2 import dbapi2 as sqlite
        logInfo('Using sqlite-2')
        return sqlite.connect(filename)
    except ImportError:
        pass  # logWarn("from pysqlite2 import dbapi2 failed")
    try:
        import sqlite3 as sqlite
        logInfo('Using sqlite-3')
        return sqlite.connect(filename)
    except ImportError:
        pass  # logWarn("import sqlite3 failed")
    try:
        import sqlite
        logInfo('Using sqlite-1')
        return sqlite.connect(filename)
    except ImportError:
        pass  # logWarn("import sqlite failed")
    return None
def itemlist(self, index):
    # Fill the "placezone" widget with "name latitude longitude" entries for
    # the zone (table) selected at *index* in the Place combo box.
    # Python 2 GUI callback.
    Zone = self.w.Place.itemText(index)
    database_name = parseOutputconf()['spatialitedb']
    db_connection = None
    try:
        db_connection = sqlite3.connect(database_name)
    except:
        # NOTE(review): bare except hides any connection error, not just a
        # missing database file.
        self.worningmessage('spatialitedb not found')
    if db_connection is not None:
        # Reconnects even though a connection was just opened above.
        db_connection = sqlite3.connect(database_name)
        db_cursor = db_connection.cursor()
        try:
            # NOTE(review): the table name comes straight from the UI text —
            # SQL injection risk if the combo contents are not controlled.
            listatabelle = db_cursor.execute("SELECT name,latitude,longitude FROM %s ;" % (Zone))
            tabelle = listatabelle.fetchall()
            tablelist = []
            allist = []
            for i in tabelle:
                tablelist.append(i[0])
                allist.append(i[0]+' '+str(i[1])+' '+str(i[2]))
            allist.sort()
            tablelist.sort()
            self.w.placezone.clear()
            self.w.placezone.addItems(allist)
            db_connection.commit()
        except:
            # Any failure (bad table, etc.) is reduced to this message.
            print 'reload sqlite'
def __init__(self, path, log=None, params={}):
    # Wrap a pysqlite connection to *path* (':memory:' allowed), validating
    # file permissions first and adapting to pysqlite 1.x vs 2.x APIs.
    # NOTE(review): mutable default `params={}` is shared across calls; safe
    # only while callers never mutate it.
    assert have_pysqlite > 0
    self.cnx = None
    if path != ':memory:':
        if not os.access(path, os.F_OK):
            raise TracError('Database "%s" not found.' % path)
        # Both the file and its directory must be readable and writable
        # (sqlite needs the directory for its journal file).
        dbdir = os.path.dirname(path)
        if not os.access(path, os.R_OK + os.W_OK) or \
                not os.access(dbdir, os.R_OK + os.W_OK):
            raise TracError('The user %s requires read _and_ write ' \
                            'permissions to the database file %s and the ' \
                            'directory it is located in.' \
                            % (getuser(), path))
    if have_pysqlite == 2:
        self._active_cursors = weakref.WeakKeyDictionary()
        timeout = int(params.get('timeout', 10.0))
        self._eager = params.get('cursor', 'eager') == 'eager'
        # eager is default, can be turned off by specifying ?cursor=
        if isinstance(path, unicode):  # needed with 2.4.0
            path = path.encode('utf-8')
        # Older sqlite libraries are not thread-safe per connection.
        cnx = sqlite.connect(path, detect_types=sqlite.PARSE_DECLTYPES,
                             check_same_thread=sqlite_version < 30301,
                             timeout=timeout)
    else:
        # pysqlite 1.x: timeout is in milliseconds and takes an encoding.
        timeout = int(params.get('timeout', 10000))
        cnx = sqlite.connect(path, timeout=timeout, encoding='utf-8')
    ConnectionWrapper.__init__(self, cnx, log)
def __init__(self, path, params={}):
    # French-localised variant of the Trac sqlite connection wrapper: check
    # permissions, then connect with the pysqlite 1.x or 2.x API.
    # (Error messages are user-facing runtime strings and stay in French.)
    assert have_pysqlite > 0
    self.cnx = None
    if path != ':memory:':
        if not os.access(path, os.F_OK):
            raise TracError, u'Base de données "%s" non trouvée.' % path
        # File and containing directory must both be read/write accessible.
        dbdir = os.path.dirname(path)
        if not os.access(path, os.R_OK + os.W_OK) or \
                not os.access(dbdir, os.R_OK + os.W_OK):
            from getpass import getuser
            raise TracError, u"L'utilisateur %s a besoin des permissions " \
                             u"en lecture _et_ en écriture sur la base de " \
                             u"données %s ainsi que sur le répertoire " \
                             u"dans lequel elle est située." \
                             % (getuser(), path)
    if have_pysqlite == 2:
        self._active_cursors = weakref.WeakKeyDictionary()
        # timeout in seconds for pysqlite 2.
        timeout = int(params.get('timeout', 10.0))
        cnx = sqlite.connect(path, detect_types=sqlite.PARSE_DECLTYPES,
                             check_same_thread=sqlite_version < 30301,
                             timeout=timeout)
    else:
        # pysqlite 1.x: timeout in milliseconds, explicit encoding.
        timeout = int(params.get('timeout', 10000))
        cnx = sqlite.connect(path, timeout=timeout, encoding='utf-8')
    ConnectionWrapper.__init__(self, cnx)
def __init__(self, database_files=None):
    """Open the five ontology databases and keep a connection + cursor each.

    :param database_files: mapping of database key to sqlite file path;
        defaults to the historical file names.  (The default is now built
        inside the function — the previous mutable-default dict was shared
        across calls.)
    """
    if database_files is None:
        database_files = {"MESH": "mesh.db", "Cell": "cell.db",
                          "Gene": "gene.db",
                          "Molecular Roles": "molecular_role.db",
                          "Mycobacterium_Genes": "mtb_genes.db"}

    def _open(path):
        # Open one sqlite database with foreign-key enforcement enabled.
        connection = sqlite.connect(path)
        cursor = connection.cursor()
        cursor.execute("PRAGMA foreign_keys = ON;")
        return connection, cursor

    ########################
    ## Creating Databases ##
    ########################
    # MESH database
    self.mesh_db_connection, self.mesh_cursor = _open(database_files["MESH"])
    # Cell database
    self.cell_db_connection, self.cell_cursor = _open(database_files["Cell"])
    # Gene database
    self.gene_db_connection, self.gene_cursor = _open(database_files["Gene"])
    # Molecular Role database
    self.molecular_roles_db_connection, self.molecular_roles_cursor = \
        _open(database_files["Molecular Roles"])
    # Mycobacterium Gene database
    self.mtb_db_connection, self.mtb_cursor = \
        _open(database_files["Mycobacterium_Genes"])
def __init__(self, dbfile):
    """Open *dbfile*, creating the schema first if the file does not exist.

    Fixes two defects in the original: the bootstrap connection was opened,
    never closed, and immediately replaced (a leak), and any uncommitted
    schema created on it could be rolled back when it was discarded.  A
    single connection avoids both.
    """
    is_new = not os.path.exists(dbfile)
    # sqlite creates the file on connect, so one connection suffices.
    self.conn = sqlite.connect(dbfile)
    self.cursor = self.conn.cursor()
    if is_new:
        self.create_table()
def create_database (driver, database, username = None, password = None, hostname = None):
    # Factory for database wrappers: 'sqlite' returns immediately without a
    # connection test; 'mysql' is verified by a connect/close round-trip,
    # retrying interactively for a password on access failure.
    # Python 2 code (old except syntax, raise of exception classes).
    if driver == 'sqlite':
        db = SqliteDatabase (database)
        return db
    elif driver == 'mysql':
        db = MysqlDatabase (database, username, password, hostname)
    elif driver == 'postgres':
        # TODO
        raise DatabaseDriverNotSupported
    else:
        raise DatabaseDriverNotSupported
    # Try to connect to database
    try:
        db.connect ().close ()
        return db
    except AccessDenied, e:
        if password is None:
            import sys, getpass
            # FIXME: catch KeyboardInterrupt exception
            # FIXME: it only works on UNIX (/dev/tty),
            # not sure whether it's bug or a feature, though
            # Temporarily rebind stdio to the controlling terminal so the
            # password prompt works even when stdout/stdin are redirected.
            oldout, oldin = sys.stdout, sys.stdin
            sys.stdin = sys.stdout = open ('/dev/tty', 'r+')
            password = getpass.getpass ()
            sys.stdout, sys.stdin = oldout, oldin
            # Retry once with the interactively supplied password.
            return create_database (driver, database, username, password, hostname)
        raise e
def _getDb(self, channel):
    # Return the per-channel karma database, creating file, schema and the
    # custom 'nickeq' SQL function on first use.  Connections are cached in
    # self.dbs keyed by filename.  Python 2 code (old raise syntax).
    try:
        from pysqlite2 import dbapi2
    except ImportError:
        raise callbacks.Error, 'You need to have PySQLite installed to ' \
                               'use Karma. Download it at ' \
                               '<http://pysqlite.org/>'
    filename = plugins.makeChannelFilename(self.filename, channel)
    if filename in self.dbs:
        # Already open: reuse the cached connection.
        return self.dbs[filename]
    if os.path.exists(filename):
        # Existing database file: open and cache, no schema work needed.
        self.dbs[filename] = dbapi2.connect(filename)
        return self.dbs[filename]
    # Fresh database: create the karma table.
    db = dbapi2.connect(filename)
    self.dbs[filename] = db
    cursor = db.cursor()
    cursor.execute("""CREATE TABLE karma ( id INTEGER PRIMARY KEY, name TEXT, normalized TEXT UNIQUE ON CONFLICT IGNORE, added INTEGER, subtracted INTEGER )""")
    db.commit()
    def p(s1, s2):
        # SQL-level nick comparison using IRC case-folding rules.
        return int(ircutils.nickEqual(s1, s2))
    db.create_function('nickeq', 2, p)
    return db
def create_db():
    # Build a fresh in-memory test database and cursor, honouring the
    # module-level configuration flags (use_custom_types, use_dictcursor,
    # use_rowcursor, use_tuple) and the installed pysqlite version.
    # Returns (connection, cursor).
    if sqlite.version_info > (2, 0):
        # pysqlite 2.x path.
        if use_custom_types:
            con = sqlite.connect(":memory:", detect_types=sqlite.PARSE_DECLTYPES|sqlite.PARSE_COLNAMES)
            # Wrap every "text" column value in angle brackets.
            sqlite.register_converter("text", lambda x: "<%s>" % x)
        else:
            con = sqlite.connect(":memory:")
        if use_dictcursor:
            cur = con.cursor(factory=DictCursor)
        elif use_rowcursor:
            cur = con.cursor(factory=RowCursor)
        else:
            cur = con.cursor()
    else:
        # pysqlite 1.x path.
        if use_tuple:
            con = sqlite.connect(":memory:")
            con.rowclass = tuple
            cur = con.cursor()
        else:
            con = sqlite.connect(":memory:")
            cur = con.cursor()
    cur.execute("""
        create table test(v text, f float, i integer)
        """)
    return (con, cur)
def bot_quote(mess, nick, botCmd):
    """Get quotes from database. Use !quote for a random quote, and
    !quote <num> for a particular quote.

    Returns the formatted quote message, a "Max quote" hint for invalid
    numbers, or None when botCmd has an unexpected arity.
    """
    message = None
    if len(botCmd) == 1:
        # No argument: pick a random quote.
        connection = sqlite.connect(dbPath)
        cursor = connection.cursor()
        cursor.execute("SELECT rowid, * FROM quotes ORDER BY RANDOM() LIMIT 1")
        quote = cursor.fetchone()
        message = u'Epic time [' + str(quote[0]) + u']:' + quote[1] + ' (set by ' + quote[2] + ')'
    elif len(botCmd) == 2:
        connection = sqlite.connect(dbPath)
        cursor = connection.cursor()
        cursor.execute("SELECT max(rowid) FROM quotes")
        maxQuote = cursor.fetchone()
        # BUG FIX: re.match(r'[0-9]+') only anchors at the start, so input
        # like "12abc" passed the check and then crashed in int(); isdigit()
        # requires the whole string to be numeric.
        if botCmd[1].isdigit() and int(botCmd[1]) <= maxQuote[0]:
            roll = botCmd[1]
            # Parameterized query instead of string concatenation.
            cursor.execute("SELECT * FROM quotes WHERE rowid = ?", (int(roll),))
            quote = cursor.fetchone()
            message = u'Epic time[' + str(roll) + u']:' + quote[0] + ' (set by ' + quote[1] + ')'
        else:
            message = 'Max quote: ' + str(maxQuote[0])
    return message
def __init__(self, path):
    """Open the sqlite database at *path*, initialising the schema when the
    file does not exist yet (sqlite creates the file on connect)."""
    fresh = not os.path.exists(path)
    self.conn = sqlite.connect(path)
    self.cursor = self.conn.cursor()
    if fresh:
        self._setupDB()
def connect(self):
    """Return ``(connection, cursor)``, opening the connection lazily and
    transparently reopening it once if obtaining a cursor fails."""
    conn = self.connection
    if not conn:
        conn = sqlite.connect(self.path)
    try:
        cur = conn.cursor()
    except:
        # Stale or closed handle: reconnect once and retry.
        conn = sqlite.connect(self.path)
        cur = conn.cursor()
    self.connection = conn
    return (conn, cur)
def start(self):
    ''' Process itself '''
    # Take a PV snapshot: create a timestamped backup directory, record every
    # PV listed in the subsystem request files into an sqlite "pvs" table,
    # and copy the request files plus CM metadata alongside it.
    # Python 2 code (print statements).
    self.shellCommands = []
    self.backupTime = time.localtime()
    backupDirectory = os.path.join(CM_DIR, self.machine, BACKUPS_SUBDIR, time.strftime("%Y%m%d_%H%M%S", self.backupTime))
    if self.verboseLevel:
        print "backupDirectory = %s" % backupDirectory
    if self.dryRun:
        # Dry run: report only, touch nothing on disk.
        return
    os.makedirs(backupDirectory)
    snapshotFile = os.path.join(backupDirectory, SNAPSHOT_FILE)
    connection = sqlite.connect(snapshotFile)
    cursor = connection.cursor()
    sql = 'CREATE TABLE pvs (id INTEGER PRIMARY KEY, pvName VARCHAR(50), pvType VARCHAR(50), tolerance VARCHAR(50), subsystem VARCHAR(50), value VARCHAR(50))'
    cursor.execute(sql)
    # NOTE(review): reconnecting here discards the first (uncommitted)
    # connection right after the CREATE TABLE — looks redundant; confirm.
    connection = sqlite.connect(snapshotFile)
    cursor = connection.cursor()
    if self.verboseLevel:
        print "subsystems "
        print self.subsystems
    requestFiles = self.getRequestFiles(self.subsystems)
    for index, subsystem in enumerate(self.subsystems):
        requestFile = requestFiles[index]
        if self.verboseLevel:
            print "subsystem : %s" % subsystem
            print "requestFile : %s" % requestFile
        if len(requestFile) != 0 and os.path.isfile(requestFile):
            with open(requestFile, 'r') as requestFh:
                for line in requestFh.readlines():
                    line = line.rstrip('\n')
                    if len(line) == 0:
                        continue
                    if self.verboseLevel:
                        print "line: >%s<" % line
                    # Each request line: pvName[,type[,tolerance]]
                    pvEntry = line.split(',')
                    pvName = pvEntry[COL_PVNAME]
                    pv = LEpicsPv(pvName)
                    pvValue = pv.caget()
                    pvType = self.getPvType(pvName)
                    pvTolerance = 0
                    if len(pvEntry) >= (COL_TYPE+1):
                        pvType = pvEntry[COL_TYPE]
                    if len(pvEntry) >= (COL_TOLERANCE+1):
                        pvTolerance = pvEntry[COL_TOLERANCE]
                    # NOTE(review): '%d' % pvTolerance raises TypeError when
                    # the tolerance came from the file as a string; also the
                    # values are interpolated, not bound — confirm inputs are
                    # trusted.
                    sql = "INSERT INTO pvs VALUES (null, '%s','%s', '%d','%s','%s')" % ( pvName, pvType, pvTolerance, subsystem, pvValue )
                    if self.verboseLevel:
                        print sql
                    cursor.execute(sql)
            shutil.copy(requestFile, backupDirectory)
    connection.commit()
    cursor.close()
    connection.close()
    shutil.copy(self.getCminfoFile(), os.path.join(backupDirectory, CMSAVE_FILE))
    self.writeBackupMetaFile(backupDirectory, snapshotFile)
def NewDatabase(filename):
    """Create (if necessary) and return a connection to *filename*.

    Returns None (after logging a warning) when no filename is supplied.
    The original connected, closed, and reconnected; a single connect is
    sufficient since sqlite creates the file on first connection.
    """
    logging.info("Database: Creating new file: %s " % filename)
    if not filename:
        logging.warning("Database: No filename supplied to the database creator.")
        return
    con = sqlite.connect(filename)
    logging.debug("Database: Created new file: %s" % filename)
    return con
def createDrops(): connection = sqlite.connect('test.db') memoryConnection = sqlite.connect(':memory:') cursor = connection.cursor() print "Please write what drops you want" drop = raw_input("Drop: ") cursor.execute('INSERT INTO STASH VALUES (null, ?, 0)', (drop,)) connection.commit() return createDrops()
def __init__(self):
    """Encapsulating the .glade file."""
    self.itf = gtk.glade.XML('interface.glade')
    " Atribuindo os Widgets da interface a variaveis "
    # (Assigning the interface widgets to instance variables.)
    self.janela = self.itf.get_widget('windowMain')
    self.janela.set_icon_from_file('icones/icone.png')
    self.itf.signal_autoconnect(self)
    self.treev = self.itf.get_widget('treeview')
    self.txtHora = self.itf.get_widget('txtHora')
    self.cbTempo = self.itf.get_widget('cbTempo')
    self.lbTermino = self.itf.get_widget('lbTermino')
    self.txtValor = self.itf.get_widget('txtValor')
    self.cbEquipamento = self.itf.get_widget('cbEquipamento')
    self.txtUsuario = self.itf.get_widget('txtUsuario')
    self.cbtPago = self.itf.get_widget('cbtPago')
    " chamando funções "
    # (Calling setup functions and showing the window.)
    self.modelAdd()
    self.janela.show_all()
    #self.janela.maximize()
    " SQLITE "
    " verifica se o banco de dados existe "
    # (Check whether the database file already exists; create the schema and
    # a seed row on first run, otherwise load the stored records.)
    if not os.path.exists('database.db'):
        self.sqlConnect = sqlite.connect('database.db')
        self.sqlCursor = self.sqlConnect.cursor()
        sql = """ CREATE TABLE aluguel ( codigo INT NOT NULL, hora CHAR(5), valor FLOAT, tempo CHAR(1), equipamento CHAR(50), usuario CHAR(50), termino CHAR(5), pago CHAR(3), cancelado CHAR(3), data DATE, PRIMARY KEY ( codigo ) )"""
        self.sqlCursor.execute(sql)
        self.sqlConnect.commit()
        sql = "INSERT INTO aluguel (codigo) VALUES (0)"
        self.sqlCursor.execute(sql)
        self.sqlConnect.commit()
    else:
        self.sqlConnect = sqlite.connect('database.db')
        self.sqlCursor = self.sqlConnect.cursor()
        self.selectRegistros()
    gtk.main()
def fight(self):
    # One fight sequence: read the player's character stats, pick a random
    # NPC from npc.db, then trade blows until one side drops to zero HP.
    # Python 2 code (print statements).
    self.cursor.execute('SELECT * FROM karakterler')
    row = self.cursor.fetchone()
    # Sum of the five stat columns; fight only proceeds with enough points.
    toplam = row[2]+row[3]+row[4]+row[5]+row[6]
    if toplam >= 30:
        print "stats are ok for now..."
        self.saglik = row[2] * 6   # health derived from stat column 2
        self.attack = row[4] * 3   # attack derived from stat column 4
        # Pick a random monster; the in-memory connection is never used.
        connection = sqlite.connect('npc.db')
        memoryConnection = sqlite.connect(':memory:')
        cursor = connection.cursor()
        cursor.execute('SELECT * FROM npc ORDER BY RANDOM()')
        war = cursor.fetchone()
        self.mlife = war[3]
        self.mattack = war[5]
        self.mdefance = war[4]
        self.mexp = war[7]
        self.drop = war[2]
        # Player damage per round after the monster's defence.
        pdamage = int(self.attack) - int(self.mdefance)
        if int(self.mdefance) > int(self.attack):
            print "He is stronger than we thought... RUNNN!"
        else:
            print "first fight begins..."
            if self.saglik == 0:
                print "Gratz! You are dead before you start the game, zero hp!"
            else:
                while self.saglik > 0:
                    self.saglik = int(self.saglik) - int(self.mattack)
                    print "your life is down to %s" % self.saglik
                    self.mlife = int(self.mlife) - int(pdamage)
                    print "mounster's life is %s" % self.mlife
                    if self.mlife == 0:
                        print "you win ohh yeahhh"
                        expsystem.gexp()
                        depo.addToStash()
                        break
                    elif self.mlife < int(pdamage):
                        # NOTE(review): assigns a *local* `mlife`; almost
                        # certainly meant `self.mlife = 0` — the printed
                        # value below is the unclamped (negative) life.
                        mlife = 0
                        print "mounster's life is %s" % self.mlife
                        print "you win ohh yeahhh"
                        expsystem.gexp()
                        depo.addToStash()
                        break
                    elif self.saglik == 0:
                        print "so death is only truth you have"
                        expsystem.lexp()
                        break
                    else:
                        continue
def getDB(dbfile="/var/www/poll/data/poll.db"):
    """Open (creating on first use) the poll votes database.

    :param dbfile: database path — now a parameter with the original
        hard-coded location as its default, so tests and other deployments
        can supply their own path (backward compatible).
    :return: connection with ``Row`` access and ``str`` text factory.

    Fixes: the create branch previously neither committed the new schema
    nor configured ``row_factory``/``text_factory``; both now apply on
    every path.
    """
    exists = os.path.isfile(dbfile)
    con = sqlite.connect(dbfile,
                         detect_types=sqlite.PARSE_DECLTYPES | sqlite.PARSE_COLNAMES)
    if not exists:
        con.execute("create table votes(title text, author text, comments text, useraddr text, added datetime)")
        con.commit()  # persist the schema immediately
    con.row_factory = sqlite.Row
    con.text_factory = str
    return con
def _connectDB(self):
    # Connect the bot to its backing store: a local sqlite-style file on
    # Windows, otherwise a server using the configured MySQL options.
    # NOTE(review): `db` is the driver module imported elsewhere in the
    # file — presumably exposing a DB-API `connect`; confirm which driver.
    self.logger.info('Connecting to database...')
    if sys.platform == 'win32':
        self.dbConn = db.connect('joggerbot.db')
    else:
        host = self.mysqlOpts['server']
        user = self.mysqlOpts['username']
        passwd = self.mysqlOpts['password']
        dbName = self.mysqlOpts['dbname']
        self.dbConn = db.connect(host, user, passwd, dbName)
    self.logger.info('JoggerBot connected to its database')
def setUp(self):
    """Start each test from a clean database file with two independent
    low-timeout connections (for exercising locking behaviour)."""
    db_path = get_db_path()
    try:
        os.remove(db_path)
    except OSError:
        # Nothing to delete on the first run.
        pass
    self.con1 = sqlite.connect(db_path, timeout=0.1)
    self.cur1 = self.con1.cursor()
    self.con2 = sqlite.connect(db_path, timeout=0.1)
    self.cur2 = self.con2.cursor()
def __init__(self, root, name, repository, accountname, config):
    # Folder status store backed by sqlite.  On first run it migrates the
    # legacy plain-text status file (uid:flags lines) into the new
    # <name>.sqlite database, renaming the old file to *.old.
    # Python 2 code (xreadlines, long).
    self.name = name
    self.root = root
    self.sep = "."
    self.config = config
    self.dofsync = config.getdefaultboolean("general", "fsync", True)
    # NOTE(review): this assignment is immediately overwritten by the
    # repository-provided filename on the next line — confirm intended.
    self.filename = os.path.join(root, name)
    self.filename = repository.getfolderfilename(name)
    self.messagelist = {}
    self.repository = repository
    self.savelock = threading.Lock()
    self.doautosave = 1
    self.accountname = accountname
    BaseFolder.__init__(self)
    self.dbfilename = self.filename + ".sqlite"
    # MIGRATE
    if os.path.exists(self.filename):
        self.connection = sqlite.connect(self.dbfilename)
        self.cursor = self.connection.cursor()
        self.cursor.execute("CREATE TABLE status (id INTEGER PRIMARY KEY, flags VARCHAR(50))")
        if self.isnewfolder():
            self.messagelist = {}
            return
        # Parse the legacy text file: first line must be the magic marker,
        # remaining lines are "uid:flags".
        file = open(self.filename, "rt")
        self.messagelist = {}
        line = file.readline().strip()
        assert line == magicline
        for line in file.xreadlines():
            line = line.strip()
            uid, flags = line.split(":")
            uid = long(uid)
            # Store flags as a sorted string for stable comparison.
            flags = [x for x in flags]
            flags.sort()
            flags = "".join(flags)
            self.cursor.execute("INSERT INTO status (id,flags) VALUES (?,?)", (uid, flags))
        file.close()
        self.connection.commit()
        # Keep the old file around as a backup.
        os.rename(self.filename, self.filename + ".old")
        self.cursor.close()
        self.connection.close()
    # create new
    if not os.path.exists(self.dbfilename):
        self.connection = sqlite.connect(self.dbfilename)
        self.cursor = self.connection.cursor()
        self.cursor.execute("CREATE TABLE status (id INTEGER PRIMARY KEY, flags VARCHAR(50))")
    else:
        self.connection = sqlite.connect(self.dbfilename)
        self.cursor = self.connection.cursor()
def __init__(self, path, log=None, params={}):
    # Trac sqlite connection wrapper (pysqlite 2 only): validate file
    # permissions, connect, and load any configured sqlite extensions.
    # NOTE(review): mutable default `params={}` is shared across calls; safe
    # only while callers never mutate it.
    assert have_pysqlite > 0
    self.cnx = None
    if path != ':memory:':
        if not os.access(path, os.F_OK):
            raise TracError(_('Database "%(path)s" not found.',
                              path=path))
        # Both the file and its directory must be readable and writable
        # (sqlite needs the directory for its journal file).
        dbdir = os.path.dirname(path)
        if not os.access(path, os.R_OK + os.W_OK) or \
                not os.access(dbdir, os.R_OK + os.W_OK):
            raise TracError(
                _('The user %(user)s requires read _and_ write '
                  'permissions to the database file %(path)s '
                  'and the directory it is located in.',
                  user=getuser(), path=path))
    self._active_cursors = weakref.WeakKeyDictionary()
    timeout = int(params.get('timeout', 10.0))
    self._eager = params.get('cursor', 'eager') == 'eager'
    # eager is default, can be turned off by specifying ?cursor=
    if isinstance(path, unicode):  # needed with 2.4.0
        path = path.encode('utf-8')
    # Older sqlite libraries are not thread-safe per connection.
    cnx = sqlite.connect(path, detect_types=sqlite.PARSE_DECLTYPES,
                         check_same_thread=sqlite_version < (3, 3, 1),
                         timeout=timeout)
    # load extensions
    extensions = params.get('extensions', [])
    if len(extensions) > 0:
        # Extension loading must be explicitly enabled, then disabled again.
        cnx.enable_load_extension(True)
        for ext in extensions:
            cnx.load_extension(ext)
        cnx.enable_load_extension(False)
    ConnectionWrapper.__init__(self, cnx, log)
def query_class(className):
    """Look up a course like ``"CS125"`` and return a JSON object mapping
    each section identifier to its list of military-format start times.

    Returns ``{"error": "class not found"}`` as JSON for malformed names or
    unknown courses.

    Fixes: an unmatched class name previously raised AttributeError on
    ``classToken.group``; query values are now bound as parameters instead
    of interpolated (only the constant table names remain interpolated);
    the unused ``timeEnd`` local was removed.
    """
    connection = sqlite.connect(DATABASE_NAME)
    cursor = connection.cursor()
    classToken = re.match(r'(?P<department>[a-zA-Z]+)(?P<number>\d+)', className)
    if classToken is None:
        # BUG FIX: malformed input used to crash with AttributeError.
        return json.dumps({'error': 'class not found'})
    classDept = classToken.group('department').upper()
    classNumber = classToken.group('number')
    cursor.execute("SELECT * FROM %s WHERE subject=? AND number=?" % COURSE_TABLE_NAME,
                   (classDept, classNumber))
    res = cursor.fetchall()
    if len(res) <= 0:
        return json.dumps({'error': 'class not found'})
    # For each <class> find the corresponding sections and place in section table
    sectionTable = {}
    for row in res:
        rowPk = int(row[0])
        cursor.execute("SELECT * FROM %s WHERE course_pk=?" % SECTION_TABLE_NAME,
                       (rowPk,))
        sectionResult = cursor.fetchall()
        for section in sectionResult:
            timeStart = to_military(section[5])
            try:
                sectionTable[section[3]].append(timeStart)
            except KeyError:
                sectionTable[section[3]] = [timeStart]
    return json.dumps(sectionTable)
def _open(self): try: self._con = sqlite.connect(self._filename) self._cur = self._con.cursor() self._checktables() except sqlite.DatabaseError, e: raise DatabaseException("SQLite: %s" % (s))
def find_class(year, season, category, timeStart, days):
    """Find classes for the given term that start at *timeStart* on *days*,
    optionally filtered to the humanities or engineering subject lists.

    Returns a JSON array of (year, semester, subject, number, name) rows, or
    a JSON error object for invalid input.

    Fixes: an unparsable *timeStart* previously raised AttributeError on
    ``timeMatch.group``; the query values are now bound as parameters (the
    subject filter is built only from the fixed lists below).
    """
    connection = sqlite.connect(DATABASE_NAME)
    cursor = connection.cursor()
    # validate input data: reject anything outside letters/digits/colon/space
    if re.search(r'[^a-zA-Z:0-9\s]+', year + season + category + timeStart + days) is not None:
        return json.dumps({'error': 'sql'})
    timeMatch = re.match(r'(?P<hour>\d+):(?P<minute>\d+)\s*(?P<ampm>[AMP]+)', timeStart)
    if timeMatch is None:
        # BUG FIX: previously crashed with AttributeError on bad times.
        return json.dumps({'error': 'time'})
    # Normalise to zero-padded "HH:MM AM/PM".
    timeStart = '%02d:%02d %s' % (int(timeMatch.group('hour')), int(timeMatch.group('minute')), timeMatch.group('ampm'))
    # form the category filter
    cat_hum = ['AAS','AFRO','AFST','AIS','ANTH','ARCH','ART','ARTD','ARTE','ARTF','ARTH','ARTS','ASST','CHLH','CINE','CLCV','CMN','CW','CWL','EALC','EDPR','EIL','ENGL','ENVS','EOL','EPS','EPSY','ESL','EURO','FAA','GEOG','GER','GLBL','GMC','GS','GWS','HCD','HDES','HDFS','HIST','HRE','HUM','JOUR','JS','LAST','LLS','MDIA','MDVL','MUS','MUSE','NUTR','PHIL','PS','PSYC','REES','REHB','RHET','RLST','RSOC','RST','RUSS','SAME','SCAN','SCR','SLAV','SOC','SPAN','SPED','SWAH','TURK','UKR','WLOF','WRIT','YDSH','ZULU']
    cat_eng = ['ABE','ACES','AE','ASTR','BIOC','BIOE','BIOL','BIOP','BTW','CB','CDB','CEE','CHBE','CHEM','CPSC','CS','CSE','ECE','ECON','ENG','ENGH','ESE','GE','GEOG','GEOL','HORT','IB','IE','LIS','MATH','MCB','ME','MICR','MSE','NEUR','NPRE','NRES','PATH','PBIO','PHYS','PLPA','STAT','TE','TSM']
    if category == 'humanities':
        catpred = 'AND subject IN ("' + '","'.join(cat_hum) + '")'
    elif category == 'engineering':
        catpred = 'AND subject IN ("' + '","'.join(cat_eng) + '")'
    else:
        catpred = ''
    FIND_CLASS_QUERY = 'SELECT year, semester, subject, number, name FROM (uiuc_sections INNER JOIN uiuc_courses ON uiuc_sections.course_pk=uiuc_courses.pk) WHERE semester=? AND year=? AND time_start=? AND days=? %s GROUP BY name ORDER BY subject ASC' % catpred
    cursor.execute(FIND_CLASS_QUERY, (season, year, timeStart, days))
    res = cursor.fetchall()
    return json.dumps(res)
def get_sqlite_conn(file):
    """Return a new sqlite connection to *file*."""
    conn = sqlite.connect(file)
    return conn
elif arg == "--prior": fprior = sys.argv[iargv + 1] iargv = iargv + 1 elif re.match(redep, arg): dependency = re.match(redep, arg).group(1) elif re.match(repart, arg) is not None: ##--partition=part partition = re.sub(repart, "", arg) else: arguments.append(arg) ##Other non-parsed options things else: arguments.append(arg) ##Other things iargv = iargv + 1 ##DB connection try: db = sqlite.connect(dbFile, timeout=dbTimeout) except: eprint("Error connecting to the database %d" % (dbFile)) raise curdb = db.cursor() curdb.execute("SELECT name FROM sqlite_master WHERE type='table' AND name=?", ("pendingJobs", )) if curdb.fetchone() == None: raise ValueError("The database does not contain the pendingJobs table") #curdb.execute("SELECT name FROM sqlite_master WHERE type='table' AND name=?",("submittedJobs",)) #if curdb.fetchone()==None: # raise ValueError("The database does not contain the submittedJobs table") ##
# Minimal smoke-test script: open foods.db through the pysqlite2 binding and
# print column index 2 of the first ten rows.  Python 2 (print statement).
from pysqlite2 import dbapi2 as sqlite3

con = sqlite3.connect("foods.db")
cur = con.cursor()
cur.execute('select * from foods limit 10')
for row in cur:
    print row[2]
def create_gnumed_import_sql(filename):
    # Read the Clinica sqlite database at *filename* and emit (to stdout) a
    # GNUmed-compatible PostgreSQL import script for every patient: identity,
    # names, external ids, phone/address comm channels, then visits.
    # The transaction is deliberately left ending in "rollback;" so the
    # operator must consciously switch to "commit;".  Python 2 script.
    # CREATE TABLE patients (gender TEXT, doctor INTEGER, surname TEXT, ID INTEGER PRIMARY KEY, identification_code TEXT, phone TEXT, given_name TEXT, birth_date TEXT, residence_address TEXT);
    print ''
    print 'set default_transaction_read_only to off;'
    print ''
    print "begin;"
    print ''
    now = gmDateTime.pydt_now_here().isoformat()
    clinica_db = sqlite.connect(database=filename)
    curs = clinica_db.cursor()
    cmd = 'select * from patients'
    curs.execute(cmd)
    # Column names from the cursor description, used to build row dicts.
    keys = [r[0] for r in curs.description]
    row = curs.fetchone()
    if row is None:
        print "-- no patients in database"
        return
    row = sanitize_patient_row(dict(zip(keys, row)))
    print '-- import-related encounter type'
    print "INSERT INTO clin.encounter_type (description) SELECT '%s' WHERE NOT EXISTS (SELECT 1 FROM clin.encounter_type WHERE description = '%s' LIMIT 1);" % (
        Clinica_encounter_type, Clinica_encounter_type)
    while row is not None:
        print ''
        print '-- next patient'
        print "INSERT INTO dem.identity (gender, dob, comment) VALUES ('%s', NULL, 'Clinica import @ %s');" % (
            row['gender'], now)
        # Only set a dob when the source actually has one.
        if row['birth_date'] is not None:
            if row['birth_date'].strip() != '':
                print """UPDATE dem.identity SET dob = '%s'::timestamp with time zone WHERE pk = currval('dem.identity_pk_seq');""" % row[
                    'birth_date']
        print """SELECT dem.add_name(currval('dem.identity_pk_seq')::integer, '%s'::text, '%s'::text, True);""" % (
            row['given_name'], row['surname'])
        # Always record the Clinica primary key as an external id.
        print """INSERT INTO dem.lnk_identity2ext_id (id_identity, external_id, fk_origin) VALUES (currval('dem.identity_pk_seq'), '%s', dem.add_external_id_type('Clinica primary key', 'Clinica EMR'));""" % row[
            'ID']
        if row['identification_code'] is not None:
            print """INSERT INTO dem.lnk_identity2ext_id (id_identity, external_id, fk_origin) VALUES (currval('dem.identity_pk_seq'), '%s', dem.add_external_id_type('Clinica-external ID', 'Clinica EMR'));""" % row[
            'identification_code']
        if row['phone'] is not None:
            print """INSERT INTO dem.lnk_identity2comm (fk_identity, 
url, fk_type) VALUES (currval('dem.identity_pk_seq'), '%s', dem.create_comm_type('homephone'));""" % row[
            'phone']
        if row['residence_address'] is not None:
            print """INSERT INTO dem.lnk_identity2comm (fk_identity, url, fk_type) VALUES (currval('dem.identity_pk_seq'), '%s', dem.create_comm_type('Clinica address'));""" % row[
            'residence_address']
        create_visit_sql(row['ID'], clinica_db)
        row = curs.fetchone()
        if row is not None:
            row = sanitize_patient_row(dict(zip(keys, row)))
    print ''
    print '-- comment this out when you are ready to *really* run the data import:'
    print 'rollback;'
    print ''
    print 'commit;'
def setUp(self):
    """Create an in-memory connection with column-name type detection and
    register the "bin" converter used by these tests."""
    detect = sqlite.PARSE_COLNAMES
    self.con = sqlite.connect(":memory:", detect_types=detect)
    sqlite.register_converter("bin", BinaryConverterTests.convert)
# Demo of pysqlite's adaptation protocol: a custom object serialises itself
# when bound as a query parameter.  Python 2 script (print statement).
from pysqlite2 import dbapi2 as sqlite

class Point(object):
    # Simple 2-D point that knows how to adapt itself for SQL binding.
    def __init__(self, x, y):
        self.x, self.y = x, y

    def __conform__(self, protocol):
        # Called by pysqlite when the object is used as a parameter:
        # serialise as "x;y" (returns None for unknown protocols).
        if protocol is sqlite.PrepareProtocol:
            return "%f;%f" % (self.x, self.y)

con = sqlite.connect(":memory:")
cur = con.cursor()
p = Point(4.0, -3.2)
# Binding p triggers __conform__, so the query returns the serialised form.
cur.execute("select ?", (p, ))
print cur.fetchone()[0]
def connect(self):
    """Open the sqlite connection (idempotent) and register the SQL function
    shims that emulate PostgreSQL built-ins; returns self for chaining."""
    if self._conn is not None:
        # Already connected: nothing to do.
        return self
    self._conn = sqlite.connect(
        self._make_uri(), uri=True,
        detect_types=sqlite.PARSE_DECLTYPES | sqlite.PARSE_COLNAMES,
        factory=SQLiteConnection)
    # Date/time and string helpers.
    self._conn.create_function('extract', 2, SQLiteExtract.extract)
    self._conn.create_function('date_trunc', 2, date_trunc)
    self._conn.create_function('split_part', 3, split_part)
    self._conn.create_function('to_char', 2, to_char)
    if sqlite.sqlite_version_info < (3, 3, 14):
        # replace() only became a native sqlite function in 3.3.14.
        self._conn.create_function('replace', 3, replace)
    self._conn.create_function('now', 0, now)
    # Variadic aggregat-like helpers (-1 = any number of arguments).
    self._conn.create_function('greatest', -1, greatest)
    self._conn.create_function('least', -1, least)
    self._conn.create_function('bool_and', -1, bool_and)
    self._conn.create_function('bool_or', -1, bool_or)
    # Mathematical functions
    self._conn.create_function('cbrt', 1, cbrt)
    self._conn.create_function('ceil', 1, math.ceil)
    self._conn.create_function('degrees', 1, math.degrees)
    self._conn.create_function('div', 2, div)
    self._conn.create_function('exp', 1, math.exp)
    self._conn.create_function('floor', 1, math.floor)
    self._conn.create_function('ln', 1, math.log)
    self._conn.create_function('log', 1, math.log10)
    self._conn.create_function('mod', 2, math.fmod)
    self._conn.create_function('pi', 0, lambda: math.pi)
    self._conn.create_function('power', 2, math.pow)
    self._conn.create_function('radians', 1, math.radians)
    self._conn.create_function('sign', 1, sign)
    self._conn.create_function('sqrt', 1, math.sqrt)
    self._conn.create_function('trunc', 1, math.trunc)
    self._conn.create_function('trunc', 2, trunc)
    # Trigonomentric functions
    self._conn.create_function('acos', 1, math.acos)
    self._conn.create_function('asin', 1, math.asin)
    self._conn.create_function('atan', 1, math.atan)
    self._conn.create_function('atan2', 2, math.atan2)
    self._conn.create_function('cos', 1, math.cos)
    self._conn.create_function(
        'cot', 1, lambda x: 1 / math.tan(x) if x else math.inf)
    self._conn.create_function('sin', 1, math.sin)
    self._conn.create_function('tan', 1, math.tan)
    # Random functions
    self._conn.create_function('random', 0, random.random)
    self._conn.create_function('setseed', 1, random.seed)
    # String functions
    self._conn.create_function('overlay', 3, SQLiteOverlay.overlay)
    self._conn.create_function('overlay', 4, SQLiteOverlay.overlay)
    self._conn.create_function('position', 2, SQLitePosition.position)
    # Echo every executed statement to the debug log when enabled.
    if (hasattr(self._conn, 'set_trace_callback')
            and logger.isEnabledFor(logging.DEBUG)):
        self._conn.set_trace_callback(logger.debug)
    self._conn.execute('PRAGMA foreign_keys = ON')
    return self
def setUp(self):
    """Boot a throw-away test environment: stubbed SMTP, an in-memory
    SQLite database loaded with the schema, production-like settings,
    and a CherryPy server whose views are replaced by Stub_view."""
    # trick tested methods into using a fake SMTP server
    Stub_smtp.reset()
    smtplib.SMTP = Stub_smtp
    from controller.Root import Root
    cherrypy.lowercase_api = True
    self.database = Database(
        Connection_wrapper(
            sqlite.connect(":memory:", detect_types=sqlite.PARSE_DECLTYPES,
                           check_same_thread=False)),
        cache=Stub_cache(),
    )
    # Python 2 file(): load the whole schema and execute it in one shot.
    self.database.execute_script(file("model/schema.sqlite").read(), commit=True)
    self.settings = {
        u"global": {
            u"server.environment": "production",
            u"session_filter.on": True,
            u"session_filter.storage_type": u"ram",
            u"encoding_filter.on": True,
            u"encoding_filter.encoding": "utf-8",
            u"decoding_filter.on": True,
            u"decoding_filter.encoding": "utf-8",
            u"server.log_to_screen": False,
            u"luminotes.http_url": u"http://luminotes.com",
            u"luminotes.https_url": u"https://luminotes.com",
            u"luminotes.http_proxy_ip": u"127.0.0.1",
            u"luminotes.https_proxy_ip": u"127.0.0.2",
            u"luminotes.support_email": "*****@*****.**",
            u"luminotes.payment_email": "*****@*****.**",
            # Two subscription plans so tests can exercise plan upgrades.
            u"luminotes.rate_plans": [
                {
                    u"name": u"super",
                    u"storage_quota_bytes": 1337 * 10,
                    u"notebook_collaboration": False,
                    u"user_admin": False,
                    u"included_users": 1,
                    u"fee": 1.99,
                    u"yearly_fee": 19.90,
                    u"button": u"[subscribe here user %s!] button (modify=%s)",
                    u"yearly_button": u"[yearly subscribe here user %s!] button (modify=%s)",
                },
                {
                    u"name": "extra super",
                    u"storage_quota_bytes": 31337 * 1000,
                    u"notebook_collaboration": True,
                    u"user_admin": True,
                    u"included_users": 3,
                    u"fee": 9.00,
                    u"yearly_fee": 90.00,
                    u"button": u"[or here user %s!] button (modify=%s)",
                    u"yearly_button": u"[yearly or here user %s!] button (modify=%s)",
                },
            ],
            "luminotes.download_products": [
                {
                    "name": "local desktop extravaganza",
                    "designed_for": "individuals",
                    "storage_quota_bytes": None,
                    "included_users": 1,
                    "notebook_sharing": False,
                    "notebook_collaboration": False,
                    "user_admin": False,
                    "fee": "30.00",
                    "item_number": "5000",
                    "filename": "test.exe",
                    "button": u"",
                },
            ],
        },
        # Streamed endpoints must bypass the encoding filter.
        u"/files/download": {
            u"stream_response": True,
            u"encoding_filter.on": False,
        },
        u"/files/download_product": {
            u"stream_response": True,
            u"encoding_filter.on": False,
        },
        u"/notebooks/export_csv": {
            u"stream_response": True,
            u"encoding_filter.on": False,
        },
        u"/files/progress": {
            u"stream_response": True,
        },
    }
    cherrypy.root = Root(self.database, self.settings, suppress_exceptions=True)
    cherrypy.config.update(self.settings)
    cherrypy.server.start(init_only=True, server_class=None)
    # since we only want to test the controller, use the stub view for all
    # exposed methods
    import controller.Expose
    Stub_view.result = None
    controller.Expose.view_override = Stub_view
def __init__(self, db_name): """ Opens the connection to the database. """ self.con = sqlite.connect(db_name)
def __init__(self, db_name): self.con = sqlite.connect(db_name)
def __init__(self, filename): try: self.conn = sqlite.connect(filename) self.cursor = self.conn.cursor() except: raise Exception("Connect To The SQLite Failed!")
def setUp(self): self.cx = sqlite.connect(":memory:") cu = self.cx.cursor() cu.execute("create table test(id integer primary key, name text)") cu.execute("insert into test(name) values (?)", ("foo", ))
def setUp(self): self.con = sqlite.connect(":memory:") self.cur = self.con.cursor() self.cur.execute( "create table test(id integer primary key, name text, bin binary, ratio number, ts timestamp)" )
help= 'Do not look for timestamp and receive station at the end of each line [default: with-uscg]' ) # parser.add_option('-p','--payload-table', dest='payload_table', default=False, action='store_true', # help='Add an additional table that stores the NMEA payload text') parser.add_option('-v', '--verbose', dest='verbose', default=False, action='store_true', help='Make program output more verbose info as it runs') (options, args) = parser.parse_args() cx = sqlite.connect(options.databaseFilename) if options.create_tables: create_tables(cx, verbose=options.verbose) # create_tables(cx, options.payload_table, verbose=options.verbose) if len(args) == 0: args = (sys.stdin, ) print 'processing from stdin' for filename in args: print 'processing file:', filename load_data( cx, file(filename, 'r'), verbose=options.verbose,
def setUp(self): self.con = sqlite.connect(":memory:", detect_types=sqlite.PARSE_DECLTYPES) self.cur = self.con.cursor() self.cur.execute("create table test(d date, ts timestamp)")
'31': "China", '41': "France", '51': "UK", } #Needed for decoding crits in details VEHICLE_DEVICE_TYPE_NAMES = ('engine', 'ammoBay', 'fuelTank', 'radio', 'track', 'gun', 'turretRotator', 'surveyingDevice') VEHICLE_TANKMAN_TYPE_NAMES = ('commander', 'driver', 'radioman', 'gunner', 'loader') ## Database SQLITE_DB = os.path.abspath(os.path.dirname(__file__)) + "/wot.db" from pysqlite2 import dbapi2 as sqlite3 db_conn = sqlite3.connect(SQLITE_DB) db_conn.row_factory = sqlite3.Row ## Logging settings default_formatter = logging.Formatter("%(asctime)s:%(levelname)s:%(message)s") rtb_formatter = logging.Formatter("%(asctime)s: %(message)s") console_handler = StreamHandler() console_handler.setFormatter(default_formatter) default_handler = RotatingFileHandler("parser.log", "a", 1024 * 5, 3) default_handler.setLevel(logging.DEBUG) default_handler.setFormatter(default_formatter) root = logging.getLogger() root.addHandler(console_handler)
def setUp(self): self.con = sqlite.connect(":memory:") self.cur = self.con.cursor() self.cur.execute("create table test(i integer, s varchar, f number, b blob)")
filelist = os.popen(dircommand).readlines() filelist = [x.rstrip() for x in filelist] return filelist # trials trialdir = 'Mined_1840s_clean' triallist = getFileNames(trialdir) numdocs = len(triallist) # create tfidf directory if it doesn't exist tfidfdir = 'TFIDF_1840s' if os.path.exists(tfidfdir) == 0: os.mkdir(tfidfdir) # look up document frequencies in SQLite DB connection = sqlite.connect('docfreqs1840s.db') cursor = connection.cursor() # process each trial i = 0 for t in triallist: # provide feedback for user i += 1 print 'Processing %06d %s' % (i, t) sys.stdout.flush() # create a dictionary of unique words and word counts from trial trialstr = '' trialstr = open(trialdir + '\\' + t, 'r').read() allwords = trialstr.split(' ')
def test03_64bit_database(self):
    """Exercise branch pragmas on a database whose master branch was
    advanced past a 64-bit commit number (``v64bit_increment``).

    Relies on state set up by earlier tests: an on-disk ``test.db`` with
    branching enabled, rows 'first'/'second' on master and a 'test'
    branch containing 'from test branch'.
    """
    conn = sqlite3.connect('file:test.db?branches=on')
    c = conn.cursor()
    c.execute("insert into t1 values ('third')")
    conn.commit()
    c.execute("insert into t1 values ('fourth')")
    c.execute("insert into t1 values ('fifth')")
    c.execute("insert into t1 values ('sixth')")
    conn.commit()
    c.execute("select * from t1")
    self.assertListEqual(c.fetchall(), [("first",),("second",),("third",),("fourth",),("fifth",),("sixth",)])
    # Switch to the pre-existing 'test' branch.
    c.execute("pragma branch=test")
    c.execute("pragma branch")
    self.assertEqual(c.fetchone()[0], "test")
    c.execute("select * from t1")
    self.assertListEqual(c.fetchall(), [("first",),("from test branch",)])
    # Fork two sub-branches at explicit 64-bit commit points of 'test'.
    c.execute("pragma new_branch=sub-test1 at test." + str(v64bit_increment + 2))
    c.execute("pragma new_branch=sub-test2 at test." + str(v64bit_increment + 3))
    c.execute("pragma branches")
    self.assertListEqual(c.fetchall(), [("master",),("test",),("sub-test1",),("sub-test2",)])
    # Creating a branch leaves the connection positioned on it.
    c.execute("pragma branch")
    self.assertEqual(c.fetchone()[0], "sub-test2")
    c.execute("select * from t1")
    self.assertListEqual(c.fetchall(), [("first",),("from test branch",)])
    c.execute("insert into t1 values ('from sub-test2 branch')")
    conn.commit()
    # Each sub-branch sees only its own history.
    c.execute("pragma branch=sub-test1")
    c.execute("pragma branch")
    self.assertEqual(c.fetchone()[0], "sub-test1")
    c.execute("select * from t1")
    self.assertListEqual(c.fetchall(), [("first",)])
    c.execute("pragma branch=sub-test2")
    c.execute("pragma branch")
    self.assertEqual(c.fetchone()[0], "sub-test2")
    c.execute("select * from t1")
    self.assertListEqual(c.fetchall(), [("first",),("from test branch",),("from sub-test2 branch",)])
    # Historical read-only views of master at specific commit numbers.
    c.execute("pragma branch=master." + str(v64bit_increment + 3))
    c.execute("pragma branch")
    self.assertEqual(c.fetchone()[0], "master." + str(v64bit_increment + 3))
    c.execute("select * from t1")
    self.assertListEqual(c.fetchall(), [("first",),("second",)])
    c.execute("pragma branch=master." + str(v64bit_increment + 4))
    c.execute("pragma branch")
    self.assertEqual(c.fetchone()[0], "master." + str(v64bit_increment + 4))
    c.execute("select * from t1")
    self.assertListEqual(c.fetchall(), [("first",),("second",),("third",)])
    conn.close()
def get_sqlite_memory_connection(): return sqlite3.connect(":memory:")
import os #from datetime import datetime #import sqlite3 from pysqlite2 import dbapi2 as sqlite3 data_dir = '/Users/xlx/proj/ImageNet/db' in_words_file = 'words.txt' in_gloss_file = "gloss.txt" wnet_13k = "wnet-50.txt" sqlite_db_file = 'wordnet_fts.db' """ store to SQlite """ conn = sqlite3.connect(os.path.join(data_dir, sqlite_db_file)) cur = conn.cursor() conn.execute("""DROP TABLE wordnet""") #conn.execute("""DELETE FROM wordnet_word""") conn.execute("""CREATE VIRTUAL TABLE wordnet USING fts3(wnid, words, gloss);""") conn.commit() wn_list = map(lambda s: s.strip().split()[1], open(os.path.join(data_dir, wnet_13k), 'rt')) gdict = {} for cl in open(os.path.join(data_dir, in_gloss_file), 'rt'): tmp = cl.split('\t') assert tmp[0] not in gdict, "duplicate wnid in gloss! " if tmp[0] in wn_list: gdict[tmp[0]] = tmp[1].strip() print " load %d gloss items" % len(gdict)
def __init__(self,
             app_id,
             datastore_file,
             require_indexes=False,
             verbose=False,
             service_name='datastore_v3',
             trusted=False,
             consistency_policy=None,
             root_path=None,
             use_atexit=True,
             auto_id_policy=datastore_stub_util.SEQUENTIAL):
    """Constructor.

    Initializes the SQLite database if necessary.

    Args:
      app_id: string
      datastore_file: string, path to sqlite database. Use None to
          create an in-memory database.
      require_indexes: bool, default False. If True, composite indexes
          must exist in index.yaml for queries that need them.
      verbose: bool, default False. If True, logs all select statements.
      service_name: Service name expected for all calls.
      trusted: bool, default False. If True, this stub allows an app to
          access the data of another app.
      consistency_policy: The consistency policy to use or None to use
          the default. Consistency policies can be found in
          datastore_stub_util.*ConsistencyPolicy
      root_path: string, the root path of the app.
      use_atexit: bool, indicates if the stub should save itself atexit.
      auto_id_policy: enum, datastore_stub_util.SEQUENTIAL or .SCATTERED
    """
    datastore_stub_util.BaseDatastore.__init__(
        self, require_indexes, consistency_policy,
        use_atexit and datastore_file, auto_id_policy)
    apiproxy_stub.APIProxyStub.__init__(self, service_name)
    datastore_stub_util.DatastoreStub.__init__(self, weakref.proxy(self),
                                               app_id, trusted, root_path)

    self.__datastore_file = datastore_file
    self.__verbose = verbose

    # Separate id counter tables per allocation policy.
    self.__id_map_sequential = {}
    self.__id_map_scattered = {}
    self.__id_counter_tables = {
        datastore_stub_util.SEQUENTIAL:
            ('IdSeq', self.__id_map_sequential),
        datastore_stub_util.SCATTERED:
            ('ScatteredIdCounters', self.__id_map_scattered),
    }
    self.__id_lock = threading.Lock()

    # When verbose, use a wrapper connection class (it presumably logs
    # statements -- defined elsewhere in this file).
    if self.__verbose:
        sql_conn = SQLiteConnectionWrapper
    else:
        sql_conn = sqlite3.Connection
    self.__connection = sqlite3.connect(self.__datastore_file or ':memory:',
                                        timeout=_MAX_TIMEOUT,
                                        check_same_thread=False,
                                        factory=sql_conn)
    # Undecodable bytes are silently dropped rather than raising.
    self.__connection.text_factory = lambda x: unicode(x, 'utf-8', 'ignore')
    self.__connection_lock = threading.RLock()

    self.__namespaces = set()
    self.__query_history = {}

    self._RegisterPseudoKind(KindPseudoKind())
    self._RegisterPseudoKind(PropertyPseudoKind())
    self._RegisterPseudoKind(NamespacePseudoKind())
    self._RegisterPseudoKind(datastore_stub_util.EntityGroupPseudoKind())

    try:
        self.__Init()
    except sqlite3.DatabaseError, e:  # Python 2 except syntax
        # Translate low-level database failures into the API error the
        # proxied callers expect.
        raise apiproxy_errors.ApplicationError(
            datastore_pb.Error.INTERNAL_ERROR,
            self.READ_ERROR_MSG % (self.__datastore_file, e))
# vim: set fileencoding=utf-8: import os import sys import struct from lxml import etree from pysqlite2 import dbapi2 as sqlite3 db = os.path.join('resources', 'acc.dat') conn = sqlite3.connect(db) cur = conn.cursor() class ImportData: def __init__(self, bank, raw_data): self.data = None method = getattr(self, bank+'_handler', None) if callable(method): self.data = method(raw_data) def __call__(self): return self.data def ccb_handler(self, raw_data): idx_list = ['RcvAccNo', 'RcvAccName', 'CounterName', 'BranchName'] doc = etree.parse(raw_data).getroot() acc = [] for el in doc: info = []
def connect(self): import pysqlite2.dbapi2 as db connection = db.connect(self.database) connection.text_factory = str return connection
def open_database():
    """Open (and lazily create) the proxy database, purge stale inactive
    entries, then print summary statistics about the stored proxies."""
    global db,conn,day_keep,dbfile
    try:
        from pysqlite2 import dbapi2 as sqlite
    except:
        # Dependency hint for the user (message intentionally in Chinese).
        print """
    本程序使用 sqlite 做数据库来保存数据,运行本程序需要 pysqlite的支持
    python 访问 sqlite 需要到下面地址下载这个模块
    pysqlite, 272kb
    http://initd.org/tracker/pysqlite/wiki/pysqlite#Downloads
    下载(Windows binaries for Python 2.x)
    """
        raise SystemExit
    try:
        # autocommit mode; unix_timestamp() is a Python-side UDF.
        db = sqlite.connect(dbfile,isolation_level=None)
        db.create_function("unix_timestamp", 0, my_unix_timestamp)
        conn = db.cursor()
    except:
        print "操作sqlite数据库失败,请确保脚本所在目录具有写权限"
        raise SystemExit
    sql="""
    /* ip: 只要纯ip地址(xxx.xxx.xxx.xxx)的代理 */
    /* type: 代理类型 2:高匿 1:普匿 0:透明 -1: 未知 */
    /* status: 这个字段本程序还没有用到,留在这里作以后扩展*/
    /* active: 代理是否可用 1:可用 0:不可用 */
    /* speed: 请求相应时间,speed越小说明速度越快 */
    CREATE TABLE IF NOT EXISTS `proxier` (
      `ip` varchar(15) NOT NULL default '',
      `port` int(6) NOT NULL default '0',
      `type` int(11) NOT NULL default '-1',
      `status` int(11) default '0',
      `active` int(11) default NULL,
      `time_added` int(11) NOT NULL default '0',
      `time_checked` int(11) default '0',
      `time_used` int(11) default '0',
      `speed` float default NULL,
      `area` varchar(120) default '--', /* 代理服务器所在位置 */
      PRIMARY KEY (`ip`)
    );
    /*
    CREATE INDEX IF NOT EXISTS `type` ON proxier(`type`);
    CREATE INDEX IF NOT EXISTS `time_used` ON proxier(`time_used`);
    CREATE INDEX IF NOT EXISTS `speed` ON proxier(`speed`);
    CREATE INDEX IF NOT EXISTS `active` ON proxier(`active`);
    */
    PRAGMA encoding = "utf-8"; /* 数据库用 utf-8编码保存 */
    """
    conn.executescript(sql)
    # Delete inactive proxies added more than day_keep days ago.
    conn.execute("""DELETE FROM `proxier` where `time_added`< (unix_timestamp()-?)
    and `active`=0""",(day_keep*86400,))
    conn.execute("select count(`ip`) from `proxier`")
    m1=conn.fetchone()[0]
    if m1 is None:return
    conn.execute("""select count(`time_checked`) from `proxier`
    where `time_checked`>0""")
    m2=conn.fetchone()[0]
    if m2==0:
        # Nothing has been checked yet ("尚未检查" = "not yet checked").
        m3,m4,m5=0,"尚未检查","尚未检查"
    else:
        conn.execute("select count(`active`) from `proxier` where `active`=1")
        m3=conn.fetchone()[0]
        conn.execute("""select max(`time_checked`), min(`time_checked`)
        from `proxier` where `time_checked`>0 limit 1""")
        rs=conn.fetchone()
        m4,m5=rs[0],rs[1]
        m4=formattime(m4)
        m5=formattime(m5)
    # Summary: total / checked / valid counts and newest/oldest check time.
    print """
    共%(m1)1d条代理,其中%(m2)1d个代理被验证过,%(m3)1d个代理验证有效。
    最近一次检查时间是:%(m4)1s
    最远一次检查时间是: %(m5)1s

    提示:对于检查时间超过24小时的代理,应该重新检查其有效性
    """%{'m1':m1,'m2':m2,'m3':m3,'m4':m4,'m5':m5}
except AccessDenied, e: if password is None: import sys, getpass # FIXME: catch KeyboardInterrupt exception # FIXME: it only works on UNIX (/dev/tty), # not sure whether it's bug or a feature, though oldout, oldin = sys.stdout, sys.stdin sys.stdin = sys.stdout = open('/dev/tty', 'r+') password = getpass.getpass() sys.stdout, sys.stdin = oldout, oldin return create_database(driver, database, username, password, hostname) raise e return db if __name__ == '__main__': db = create_database('sqlite', '/tmp/foo.db') cnn = db.connect() cursor = cnn.cursor() db.create_tables(cursor) cursor.close() cnn.commit() cnn.close()
import sys ## Devel's python has "surprisingly" disabled sqlite3 support unlike 99.9% of sane python installations. import pysqlite2.dbapi2 as db import configuration ### Constants ### # full path of sqlite3 database DB = configuration.getdb() # full path of libspatialite.so.3 SPATIALPLUGIN = configuration.get_libspatialite() # creating/connecting the test_db con = db.connect(DB, check_same_thread=False) con.enable_load_extension(True) con.load_extension(SPATIALPLUGIN) con.enable_load_extension(False) def execute(sql, args=()): """ Execute sql using args for sql substitution Args: sql: SQL statement args (optional) : list of susbtitution values """ res = con.execute(sql, args) con.commit()
def __init__(self, database_name=os.path.join(BASE, 'BlockDataBase.db'), my_public_key=None): self.conn = sqlite3.connect(database_name) cursor = self.conn.cursor() create_multichain_table = u""" CREATE TABLE IF NOT EXISTS multi_chain( up INTEGER NOT NULL, down INTEGER NOT NULL, total_up UNSIGNED BIG INT NOT NULL, total_down UNSIGNED BIG INT NOT NULL, public_key TEXT NOT NULL, sequence_number INTEGER NOT NULL, link_public_key TEXT NOT NULL, link_sequence_number INTEGER NOT NULL, previous_hash TEXT NOT NULL, signature TEXT NOT NULL, insert_time TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL, block_hash TEXT NOT NULL, PRIMARY KEY (public_key, sequence_number) ); """ create_member_table = u""" CREATE TABLE IF NOT EXISTS member( identity TEXT, public_key TEXT, insert_time TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL ) """ create_visit = u""" CREATE TABLE IF NOT EXISTS visit( ip TEXT, port INT, public_key TEXT, insert_time TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL ) """ create_visit_count = u""" CREATE TABLE IF NOT EXISTS visit_count( ip TEXT, port INT, public_key TEXT, insert_time TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL ) """ cursor.execute(create_multichain_table) cursor.execute(create_member_table) cursor.execute(create_visit) cursor.execute(create_visit_count) self.conn.commit() blocks = self.get_all_blocks() self.my_public_key = my_public_key self.trust_graph = TrustGraph(blocks=blocks, my_public_key=my_public_key)
def setdb(self, dbfile): self.con = sqlite.connect(dbfile) self.con.execute( 'create table if not exists fc(feature,category,count)') self.con.execute('create table if not exists cc(category,count)')
from pysqlite2 import dbapi2 as sqlite connection = sqlite.connect('jeopardy.db') cursor = connection.cursor() cursor.execute("SELECT name FROM category LIMIT 10") results = cursor.fetchall() print "Example categories:\n" for category in results: print category[0] cursor.execute("SELECT text, answer, value FROM clue LIMIT 10") results = cursor.fetchall() print "\nExample clues:\n" for clue in results: text, answer, value = clue print "[$%s]" % (value, ) print "A: %s" % (text, ) print "Q: What is '%s'" % (answer, ) print "" cursor.execute("SELECT category.name, clue.text, clue.answer \ FROM clue, category WHERE clue.category=category.id \ AND category.name LIKE '%MYTHOLOGY%' LIMIT 10") results = cursor.fetchall() print "\nExample MYTHOLOGY clues:\n" for clue in results: name, text, answer = clue