def conn(db, host=None):
    """Open a cursor on a Wikimedia Labs replica database.

    *db* may be a Portuguese project name (e.g. u'Wikipédia'), which is
    mapped to its database name, or a raw database name.  Returns a
    cursor on success, or False when run outside Labs or on a
    connection error.
    """
    # Do not try to reach the database outside of Labs.
    if not os.uname()[1].startswith('tools-webgrid'):
        return False
    wikis = {
        u'Wikipédia': 'ptwiki',
        u'Wikilivros': 'ptwikibooks',
        u'Wikiversidade': 'ptwikiversity',
        u'Wikcionário': 'ptwiktionary',
        u'Wikinotícias': 'ptwikinews',
        u'Wikiquote': 'ptwikiquote',
        u'Wikisource': 'ptwikisource',
        u'Wikivoyage': 'ptwikivoyage'
    }
    try:
        if host:
            connection = oursql.connect(
                db=db,
                host=host,
                read_default_file=os.path.expanduser('~/replica.my.cnf'))
        else:
            # Map a project name to its database name; anything that is
            # already a database name passes through unchanged.  (The
            # original used the fragile `cond and a or b` idiom.)
            db = wikis.get(db, db)
            connection = oursql.connect(
                db=db + '_p',
                host=db + '.labsdb',
                read_default_file=os.path.expanduser('~/replica.my.cnf'))
        return connection.cursor()
    except oursql.Error:
        # Narrowed from a bare `except:`; keep the False-on-failure contract.
        return False
def __init__(self, user_name=None, password=None, host=None, database='p_wppb', wp_database='dewiki_p'): """ Constructor. *wp_database* may be `None`. Otherwise it should be the wiki’s database name, e. g. 'dewiki_p'. """ # try to read the replica.pb-db.cnf import ConfigParser import os.path import pb_db_config p = pb_db_config.db_conf_file if os.path.exists(p): parser = ConfigParser.ConfigParser() parser.readfp(open(p)) if parser.has_section('client'): if parser.has_option('client', 'user') and user_name is None: user_name = string.strip(parser.get('client', 'user'), '"\'') if (parser.has_option('client', 'password') and password is None): password = string.strip(parser.get('client', 'password'), '"\'') if parser.has_option('client', 'host') and host is None: host = string.strip(parser.get('client', 'host'), '"\'') if user_name is None or password is None or host is None: raise WPPBException( u'You did not specify enough information on' + u' the database connection. The ~/replica.pb-db.cnf ' + u'file did not contain the required ' + u'information.') try: # Workaround for the outage of c2.labsdb: the project database is # moved to tools-db. ireas/2016-02-16 self.conn = oursql.connect(host='tools-db', user=user_name, passwd=password, db=database) self.pb_database_name = database self.wp_conn = None if wp_database != None: self.wp_conn = oursql.connect(host=host, user=user_name, passwd=password, db=wp_database, charset='utf8', use_unicode=True) except oursql.DatabaseError, e: raise WPPBException(u'You specified wrong database connection ' + u'data. Error message: ' + unicode(e))
def conn(db, host=None):
    """Connect to a replica database and return a cursor on it.

    With *host* given, connects to that host directly; otherwise the
    host and database name are derived from *db* (Labs convention).
    """
    defaults_file = os.path.expanduser('~/replica.my.cnf')
    shared = dict(read_default_file=defaults_file,
                  read_timeout=10,
                  charset='utf8',
                  use_unicode=True,
                  autoreconnect=True,
                  autoping=True)
    if host:
        connection = oursql.connect(db=db, host=host, **shared)
    else:
        connection = oursql.connect(db=db + '_p', host=db + '.labsdb',
                                    **shared)
    return connection.cursor()
def mysql_query(query, params=(), dbname=None, encoding='utf-8',
                verbose=None):
    """
    Yield rows from a MySQL query.

    An example query that yields all ns0 pages might look like::

        SELECT
         page_namespace,
         page_title,
        FROM page
        WHERE page_namespace = 0;

    @param query: MySQL query to execute
    @type query: str
    @param params: input parametes for the query, if needed
    @type params: tuple
    @param dbname: db name
    @type dbname: str
    @param encoding: encoding used by the database
    @type encoding: str
    @param verbose: if True, print query to be executed;
        if None, config.verbose_output will be used.
    @type verbose: None or bool
    @return: generator which yield tuples
    """
    if verbose is None:
        verbose = config.verbose_output
    if config.db_connect_file is None:
        conn = mysqldb.connect(config.db_hostname,
                               db=config.db_name_format.format(dbname),
                               user=config.db_username,
                               passwd=config.db_password,
                               port=config.db_port)
    else:
        conn = mysqldb.connect(config.db_hostname,
                               db=config.db_name_format.format(dbname),
                               read_default_file=config.db_connect_file,
                               port=config.db_port)
    # try/finally so the cursor and connection are closed even when the
    # query raises or the consumer abandons the generator early (the
    # original leaked both in those cases).
    try:
        cursor = conn.cursor()
        try:
            if verbose:
                pywikibot.output('Executing query:\n%s' % query)
            query = query.encode(encoding)
            params = tuple(p.encode(encoding) for p in params)
            if params:
                cursor.execute(query, params)
            else:
                cursor.execute(query)
            for row in cursor:
                yield row
        finally:
            cursor.close()
    finally:
        conn.close()
def dbConnect(self):
    """Basic sql connect which creates a cursor to execute queries.

    Tries self.dbHost1 first and falls back to self.dbHost2 on a
    connection error.  Returns the cursor.
    """
    # try first host, fall back to second host on a driver error
    # (narrowed from a bare `except:`, which also hid typos here)
    try:
        conn = oursql.connect(host=self.dbHost1, user=self.dbUsername,
                              passwd=self.dbPassword, db=self.database,
                              use_unicode=False, charset=None, port=3306)
    except oursql.Error:
        conn = oursql.connect(host=self.dbHost2, user=self.dbUsername,
                              passwd=self.dbPassword, db=self.database,
                              use_unicode=False, charset=None, port=3306)
    # The original built a DictCursor and immediately overwrote it;
    # only the plain-query cursor was ever returned, so the dead
    # assignment is dropped.
    curs = conn.cursor(try_plain_query=False)
    return curs
def select_data(user_id, campaign_id):
    """Return the last 13 'hand' values for *user_id* in a campaign.

    The shard database is derived from the user id and the table name
    from the campaign id; the row filter is bound as a parameter.
    """
    import oursql
    # get db name: users are sharded into farms of 20 per 100
    farm = int((user_id % 100) / 20) + 1
    db_name = 'hoge_aprop' + str(farm)
    # get table name -- identifiers cannot be bound as parameters, so
    # the id is coerced to int before being spliced into the SQL text
    tbl_name = 'campaign_raid_jyanken_' + str(int(campaign_id))
    # parameterized WHERE clause instead of string concatenation
    sql = 'SELECT hand FROM ' + tbl_name + ' where user_id = ?'
    conn = oursql.connect(host='localhost', user='******', passwd='hoge',
                          db=db_name)
    cur = conn.cursor()
    cur.execute(sql, (int(user_id),))
    ret = cur.fetchall()
    tmp = [v[0] for v in ret]
    # keep only the 13 most recent entries
    result = tmp[len(tmp) - 13:len(tmp)]
    return result
def run(self, **kwargs):
    """Entry point for a task event."""
    # Non-blocking acquire: if another job already holds the lock,
    # abort instead of queueing behind it.
    if not self.db_access_lock.acquire(False):  # Non-blocking
        self.logger.info("A job is already ongoing; aborting")
        return
    # Which sub-task(s) to run; defaults to everything.
    action = kwargs.get("action", "all")
    try:
        start = time()
        conn = oursql.connect(**self.conn_data)
        site = self.bot.wiki.get_site()
        if action in ["all", "update_volunteers"]:
            self.update_volunteers(conn, site)
        if action in ["all", "clerk"]:
            log = u"Starting update to [[{0}]]".format(self.title)
            self.logger.info(log)
            cases = self.read_database(conn)
            page = site.get_page(self.title)
            text = page.get()
            self.read_page(conn, cases, text)
            notices = self.clerk(conn, cases)
            # Respect the emergency shutoff before writing anything.
            if self.shutoff_enabled():
                return
            if not self.save(page, cases, kwargs, start):
                return
            self.send_notices(site, notices)
        if action in ["all", "update_chart"]:
            if self.shutoff_enabled():
                return
            self.update_chart(conn, site)
        if action in ["all", "purge"]:
            self.purge_old_data(conn)
    finally:
        # Always release, including on the early returns above.
        self.db_access_lock.release()
def run(self, **kwargs):
    """Entry point for a task event.

    Depending on the kwargs passed, we will either synchronize our
    local statistics database with the site (self.sync()) or save it to
    the wiki (self.save()). We will additionally create an SQL
    connection with our local database.
    """
    action = kwargs.get("action")
    # Try a non-blocking acquire first so a concurrent "sync" can be
    # detected and skipped; all other actions wait for the lock.
    if not self.db_access_lock.acquire(False):  # Non-blocking
        if action == "sync":
            self.logger.info("A sync is already ongoing; aborting")
            return
        self.logger.info("Waiting for database access lock")
        self.db_access_lock.acquire()
    try:
        self.site = self.bot.wiki.get_site()
        self.conn = oursql.connect(**self.conn_data)
        try:
            if action == "save":
                self.save(kwargs)
            elif action == "sync":
                self.sync(kwargs)
            elif action == "update":
                self.update(kwargs)
        finally:
            # Close the connection even if the action raised.
            self.conn.close()
    finally:
        self.db_access_lock.release()
def get_connection_beforerjan2011():
    """Open a connection to the pre-Jan-2011 voxpop database host."""
    return oursql.connect(host="10.0.0.124",
                          user="******",
                          passwd="voxpop",
                          db="voxpop",
                          use_unicode=False)
def get_connection_old():
    """Open a connection to the old voxpop database host."""
    return oursql.connect(host="10.0.0.125",
                          user="******",
                          passwd="voxpop",
                          db="voxpop",
                          use_unicode=False)
def main():
    """Load a tab-separated annotation file and record gene overlaps.

    Reads DB credentials from the config file named by --conf, creates
    the accession table, then inserts one annotation row per
    overlapping gene for every tab-delimited input line.
    """
    conf = ConfigParser.ConfigParser()
    options, arg = interface()
    conf.read(options.conf)
    conn = oursql.connect(
        user=conf.get('Database', 'USER'),
        passwd=conf.get('Database', 'PASSWORD'),
        db=conf.get('Database', 'DATABASE')
    )
    cur = conn.cursor()
    createAccessionTable(cur)
    conn.commit()
    # 'with' closes the input file; the original open(...).readlines()
    # leaked the handle.  Iterating the file yields the same lines.
    with open(options.input, 'rU') as infile:
        for line in infile:
            if '\t' in line:
                line = line.split('\t')
                chromo, start, stop = line[0:3]
                acc = line[3].split('.')[0]
                overlaps = getGeneOverlap(cur, chromo, start, stop)
                if overlaps:
                    for gene in overlaps:
                        cur.execute('''INSERT INTO annotation (gene_id, long_accession, accession) VALUES (?,?, ?)''',
                                    (gene[0], line[3], acc))
                else:
                    # deliberate debugger drop on an unexpected miss
                    pdb.set_trace()
def _setup_db(self):
    """Open the MySQL connection described by self.options."""
    opts = self.options
    self.db = oursql.connect(host=opts.host,
                             port=opts.port,
                             user=opts.user,
                             passwd=opts.pwd,
                             db=opts.db)
def _cursor(self):
    # Lazily (re)connect using the Django settings dict, then return a
    # wrapped cursor.  Connection kwargs are only set for non-empty
    # settings so oursql's own defaults apply otherwise.
    if not self._valid_connection():
        kwargs = {
            'charset': 'utf8',
            'use_unicode': True,
        }
        settings_dict = self.settings_dict
        if settings_dict['USER']:
            kwargs['user'] = settings_dict['USER']
        if settings_dict['NAME']:
            kwargs['db'] = settings_dict['NAME']
        if settings_dict['PASSWORD']:
            kwargs['passwd'] = settings_dict['PASSWORD']
        # A HOST starting with '/' is a filesystem path to a UNIX
        # socket rather than a TCP hostname.
        if settings_dict['HOST'].startswith('/'):
            kwargs['unix_socket'] = settings_dict['HOST']
        elif settings_dict['HOST']:
            kwargs['host'] = settings_dict['HOST']
        if settings_dict['PORT']:
            kwargs['port'] = int(settings_dict['PORT'])
        opts = settings_dict['OPTIONS']
        if 'autoreconnect' in opts:
            kwargs['autoreconnect'] = opts['autoreconnect']
        # We need the number of potentially affected rows after an
        # "UPDATE", not the number of changed rows.
        kwargs['found_rows'] = True
        # TODO: support for 'init_command'
        # OPTIONS is merged last, so user options override everything
        # computed above.
        kwargs.update(settings_dict['OPTIONS'])
        self.connection = Database.connect(**kwargs)
        # XXX: oursql does not have encoders like mysqldb -- unknown if this is still needed
        # self.connection.encoders[SafeUnicode] = self.connection.encoders[unicode]
        # self.connection.encoders[SafeString] = self.connection.encoders[str]
        connection_created.send(sender=self.__class__)
    cursor = CursorWrapper(self.connection.cursor())
    return cursor
def main():
    """Process every month produced by gen() with a shared cursor."""
    cnf_path = os.path.expanduser("~/.my.cnf")
    db = oursql.connect(read_default_file=cnf_path,
                        raise_on_warnings=False)
    cursor = db.cursor()
    for year in gen():
        do_month(cursor, year)
def main(): print "Please insert SPARTAN DB credentials:" username = raw_input("Username: "******"spartan-db.oslo.osa", username, password, db="spartan") cursor = conn.cursor() run(cursor, "http://t/core/standards/scripts/opjsunit/", True)
def db_access(values, date_limit_str):
    """Insert one sample row and optionally fetch recent rows for a chart.

    Returns the fetched rows (empty list when GENchart is off).
    Relies on module globals: dry, GENchart, start_t, PLOT_STEPS,
    printDBG.
    """
    try:
        # connect to mysql database
        conn = oursql.connect(user='******', passwd='trollface',
                              db='weather', port=3306)
        printDBG("DB: connected!")
    except oursql.Error:  # narrowed from a bare except
        print("DB: ERR- unable to connect to mysql db, check for running daemon, database existence and table existence")
        exit(1)
    curs = conn.cursor()  # create cursor
    if not dry:
        # save values into database
        curs.execute('INSERT INTO `data` (stamp,P0,T0,H0,L0) VALUES (?, ?, ?, ?, ?)',
                     (start_t.strftime("%Y-%m-%d %H:%M:00"), values["P0"],
                      values["T0"], values["H0"], values["L0"]))
        printDBG("DB: data inserted into table")
    else:
        printDBG("DB: dry run! no db write done")
    if GENchart:
        # The timestamp filter is bound as a parameter instead of being
        # spliced into the SQL text (the original concatenation was open
        # to quoting/injection bugs); LIMIT is an int coerced explicitly.
        query = ("SELECT stamp,T0,P0,H0,L0 FROM data WHERE stamp > ? "
                 "ORDER BY stamp DESC LIMIT " + str(int(PLOT_STEPS)) + " ;")
        printDBG("DB: requesting data\nquery:", query)
        curs.execute(query, (date_limit_str,))  # request last PLOT_STEPS rows
        db_data = curs.fetchall()  # receive this
        printDBG("DB: data read")
    else:
        printDBG("DB: no data read")
        db_data = []
    curs.close()  # destruct cursor
    conn.close()  # disconnect from db
    printDBG("DB: connection closed")
    return db_data
def connect_to_database(database, host):
    """Connect to *database* on *host* using the replica credentials.

    Raises Exception when ~/replica.my.cnf is missing.
    """
    cnf = os.path.expanduser('~/replica.my.cnf')
    if not os.path.isfile(cnf):
        raise Exception('Database access not configured for this account!')
    return oursql.connect(host=host, db=database, read_default_file=cnf)
def ht_db_connect(read_default_file=DB_CONFIG_PATH):
    """Open a raw-bytes connection to the hashtag database."""
    return oursql.connect(db=HT_DB_NAME,
                          host=HT_DB_HOST,
                          read_default_file=read_default_file,
                          charset=None,
                          use_unicode=False)
def main():
    # Write a BED track of conserved-primer positions.  For each primer
    # pair the two loci are looked up in the `cons` table, positions are
    # computed relative to zebra finch, and (with --chicken) re-derived
    # from the chicken (query) coordinates instead.
    conf = ConfigParser.ConfigParser()
    options, arg = interface()
    conf.read(options.conf)
    conn = oursql.connect(
        user=conf.get('Database','USER'),
        passwd=conf.get('Database','PASSWORD'),
        db=conf.get('Database','DATABASE')
    )
    cur = conn.cursor()
    cur.execute('''SELECT distance_id, distance_close_target, left_p, right_p from primers where primer = 0''')
    data = cur.fetchall()
    #pdb.set_trace()
    outp = open(options.output, 'w')
    outp.write('''track name=consPrimers description="Primers" itemRgb=1 useScore=0\n''')
    for d in data:
        iden, ct, lp, rp = d
        # get the positions of each locus and find the smallest start pos
        # we need to do this relative to zebra finch and then transform it to
        # chicken
        cur.execute('''SELECT id, target_chromo, target_cons_start, target_cons_end FROM cons WHERE id in (?, ?) ORDER BY target_cons_end''', (iden, ct))
        #pdb.set_trace()
        positions = cur.fetchall()
        # smallest start / largest end across the two loci
        start = sorted([positions[0][2], positions[1][2]])[0]
        end = sorted([positions[0][3], positions[1][3]])[1]
        #pdb.set_trace()
        d_pos = sorted([d[0], d[1]])
        upper_name = '{0}-{1}_upper'.format(d_pos[0], d_pos[1])
        lower_name = '{0}-{1}_lower'.format(d_pos[0], d_pos[1])
        #upper_name = '{0}_upper'.format(positions[0][0])
        #lower_name = '{0}_lower'.format(positions[0][0])
        # determine the actual position of the primer
        # rp coordinates are stored as "offset,length" strings
        #pdb.set_trace()
        rp_start_temp = (start + int(rp.split(',')[0])) - end
        rp_end_temp = rp_start_temp - int(rp.split(',')[1])
        if not options.chicken:
            chromo = positions[0][1]
            lp_start = start + int(lp.split(',')[0])
            lp_end = lp_start + int(lp.split(',')[1])
            rp_end = end + (rp_start_temp)
            rp_start = end + (rp_end_temp)
        if options.chicken:
            # re-query with the chicken (query_*) coordinate columns
            cur.execute('''SELECT id, query_chromo, query_cons_start, query_cons_end FROM cons WHERE id in (?, ?) 
ORDER BY query_cons_end''', (iden, ct))
            positions = cur.fetchall()
            chromo = positions[0][1]
            start = sorted([positions[0][2], positions[1][2]])[0]
            end = sorted([positions[0][3], positions[1][3]])[1]
            lp_start = start + int(lp.split(',')[0]) - 1
            lp_end = lp_start + int(lp.split(',')[1])
            rp_end = end + (rp_start_temp)
            rp_start = end + (rp_end_temp)
        # one BED line per strand: + for the left primer, - for the right
        outp.write('{0} {1} {2} {3} 1000 + {1} {2} 255,0,0\n'.format(chromo, lp_start, lp_end, upper_name))
        outp.write('{0} {1} {2} {3} 1000 - {1} {2} 0,0,255\n'.format(chromo, rp_start, rp_end, lower_name))
    outp.close()
    conn.close()
def get_cursor(uri):
    """Return a DictCursor for uri['host'], caching one connection per host."""
    host = uri['host']
    # Single membership test on the dict itself instead of building
    # .keys() and looking the host up twice.
    if host not in get_cursor.conns:
        get_cursor.conns[host] = oursql.connect(**uri)
    return get_cursor.conns[host].cursor(oursql.DictCursor)
def get_abandoned_carts(self, minutes):
    """ Get List of Abandoned Carts

    Returns a list of (email, fname) tuples for orders abandoned more
    than *minutes* minutes ago.
    """
    logging.info("Getting abandoned carts %d minutes old." % minutes)
    conn = oursql.connect(host=self.host, user=self.username,
                          passwd=self.password, db=self.db)
    curs = conn.cursor(oursql.DictCursor)
    # *minutes* is bound as a query parameter instead of being
    # concatenated into the SQL text.
    sql = (
        "SELECT users.email, users.fname "
        "FROM orders "
        "inner join users on (users.uid = orders.uid) "
        "where orders.status = 'Abandon' "
        "and users.email <> '' "
        "and create_date < (NOW() - INTERVAL ? MINUTE) "
        "order by create_date desc")
    logging.debug(sql)
    curs.execute(sql, (int(minutes),))
    carts = []
    rows = curs.fetchall()
    for row in rows:
        cart = (row['email'], row['fname'])
        logging.debug(cart)
        carts.append(cart)
    logging.info("Found %d abandoned carts." % len(carts))
    return carts
def suspected_diffs(q):
    # Yield up to 50 recent copyright_diffs rows matching the filters in
    # *q* (a parsed query-string mapping of lists).  Only names from the
    # fixed `columns` whitelist are ever spliced into the SQL text; the
    # values themselves are bound as parameters.
    import oursql
    import dbsettings
    con = oursql.connect(host=dbsettings.reporter_db_host,
                         db='{}__copyright_p'.format(dbsettings.db_username),
                         read_default_file=dbsettings.connect_file,
                         use_unicode=True, charset='utf8')
    cursor = con.cursor()
    columns = [
        'project', 'lang', 'diff', 'diff_timestamp', 'page_title',
        'page_ns', 'ithenticate_id'
    ]
    where_cols = []
    value_cols = []
    for col in columns:
        if col in q:
            where_cols.append(col)
            value_cols.append(q[col][0].decode('utf8'))
    where = ''
    if 'report' in q:
        # 'report' is selected but never used as a filter column
        columns.append('report')
    if len(where_cols):
        where = ' where ' + ' AND '.join([x + '= ?' for x in where_cols])
    cursor.execute(
        'select ' + ', '.join(columns) + ' from copyright_diffs' + where +
        ' order by diff_timestamp desc limit 50', value_cols)
    # Filter columns are dropped from the output since the caller
    # already knows their values.
    for data in cursor:
        yield dict((col, str(data[i]))
                   for i, col in enumerate(columns) if col not in where_cols)
def insert(): print 'Populating the protected_titles table...' conn = oursql.connect(host=settings.db_host, user=settings.db_user, passwd=settings.db_pass, db=settings.db_name) cur = conn.cursor() cur.executemany('INSERT INTO `protected_titles` VALUES (?,?,?,?,?,?,?);', parse(gen())) conn.close()
def db_connect(db, host, read_default_file=DB_CONFIG_PATH):
    """Open a raw-bytes connection (no charset/unicode conversion)."""
    return oursql.connect(db=db,
                          host=host,
                          read_default_file=read_default_file,
                          charset=None,
                          use_unicode=False)
def extractVectors(config, inputPickle, outputPickle):
    """Fetch the stored vector for each track and pickle the results.

    Reads a song->track mapping from *inputPickle*, loads each track's
    serialized vector from the Track table, and writes
    {"songs": [...], "vectors": [...]} to *outputPickle*.

    SECURITY NOTE: pickle.loads() is applied to data read from the
    database; a malicious pickle executes arbitrary code, so only run
    this against a trusted database.
    """
    with open(inputPickle, "r") as f:
        song_to_track = pickle.load(f)
    songs = []
    vectors = []
    with oursql.connect(
        host=config.get("mysql", "host"),
        user=config.get("mysql", "username"),
        passwd=config.get("mysql", "password"),
        port=config.getint("mysql", "port"),
        db=config.get("mysql", "database"),
    ) as conn:
        with conn as cursor:
            for (song, track) in song_to_track.iteritems():
                cursor.execute("""SELECT data FROM Track WHERE echonest_id = ? LIMIT 1""", (track,))
                for (data,) in cursor:
                    songs.append(song)
                    vectors.append(pickle.loads(data))
    with open(outputPickle, "w") as f:
        pickle.dump({"songs": songs, "vectors": vectors}, f)
    # (dead trailing `pass` removed)
def show_api():
    """Return per-project category membership statistics as JSON.

    For each configured project, counts articles per category and
    attaches the 20 most recent membership snapshots per category.
    """
    configs = ['config.no.json', 'config.nn.json']
    projects = []
    for configfile in configs:
        # 'with' closes the config file; json.load(open(...)) leaked
        # the handle in the original.
        with open(HOME + configfile, 'r') as conf_fp:
            config = json.load(conf_fp)
        sql = oursql.connect(host=config['local_db']['host'],
                             db=config['local_db']['db'],
                             charset='utf8', use_unicode=True,
                             read_default_file=os.path.expanduser('~/replica.my.cnf'))
        cur = sql.cursor()
        cur2 = sql.cursor()
        cur.execute(u'SELECT category, COUNT(article) FROM articles GROUP BY category')
        cats = []
        for cat in cur.fetchall():
            cur2.execute(u'SELECT membercount, ts FROM stats WHERE category=? ORDER BY ts DESC LIMIT 20', [cat[0]])
            stats = []
            for s in cur2.fetchall():
                stats.append({'value': s[0], 'timestamp': s[1]})
            cats.append({'name': cat[0], 'membercount': stats})
        p = {
            'host': config['host'],
            'template': config['template'],
            'categories': cats
        }
        projects.append(p)
        cur.close()
        cur2.close()
    return flask.jsonify(projects=projects)
def connectDatabase(self, hostName=None, dbName=None): ''' Connect to the database associated with our Wikipedia, or a given server and database if host/database names are supplied. @param hostName: hostname of the server we're connecting to @type hostName: str @param dbName: name of the database we will be using @type dbName: str ''' if not hostName: hostName = u"{lang}wiki.labsdb".format(lang=self.lang) dbName = u"{lang}wiki_p".format(lang=self.lang) if self.dbConn: self.disconnectDatabase() try: self.dbConn = oursql.connect(db=dbName, host=hostName, read_default_file=os.path.expanduser(self.mysqlConf), use_unicode=False, charset=None) self.dbCursor = self.dbConn.cursor() except oursql.Error, e: logging.error("unable to connect to database {0} on server {1}".format(dbName, hostname)) logging.error("oursqul error {0}: {1}".format(e.args[0], e.args[1])) return False
def __init__(self, host=None, username=None, password=None, port=3306):
    """Store the connection settings and open the MySQL connection."""
    # Explicit assignments instead of the locals()/setattr trick.
    self.host = host
    self.username = username
    self.password = password
    self.port = port
    self.__conn = oursql.connect(host=self.host,
                                 user=self.username,
                                 passwd=self.password,
                                 port=self.port)
def get_connection():
    """Open a connection to the voxpop database."""
    return oursql.connect(host="10.0.0.125",
                          user="******",
                          passwd="voxpop",
                          db="voxpop",
                          use_unicode=False)
def get_blog_subscribers(self):
    """ Gets WordPress Blog Subscribers

    Returns a list of (user_email, display_name) tuples.
    """
    logging.info("Getting blog subscribers.")
    conn = oursql.connect(host=self.host, user=self.username,
                          passwd=self.password, db=self.db)
    curs = conn.cursor(oursql.DictCursor)
    sql = (
        "SELECT users.user_email, users.display_name "
        "FROM admin_wpdb.wp_usermeta meta "
        "INNER JOIN admin_wpdb.wp_users users ON "
        " (users.ID = meta.user_id) "
        "WHERE meta.meta_key = 'wp_capabilities' "
        "AND meta.meta_value LIKE '%subscriber%' "
        "ORDER BY users.ID")
    logging.debug(sql)
    try:
        curs.execute(sql)
    except oursql.Error as error:
        # fixed: the original caught the undefined name `Error`, which
        # would raise NameError instead of reporting the DB failure
        logging.error(error.message)
        raise Exception(error.message)
    users = []
    rows = curs.fetchall()
    for row in rows:
        user = (row['user_email'], row['display_name'])
        logging.debug(user)
        users.append(user)
    logging.info("Found %d total blog subscribers." % len(users))
    return users
def _sql_connect(self, **kwargs):
    """Attempt to establish a connection with this site's SQL database.

    oursql.connect() will be called with self._sql_data as its kwargs.
    Any kwargs given to this function will be passed to connect() and
    will have precedence over the config file.

    Will raise SQLError() if the module "oursql" is not available.
    oursql may raise its own exceptions (e.g. oursql.InterfaceError) if
    it cannot establish a connection.
    """
    if not oursql:
        raise exceptions.SQLError("Module 'oursql' is required for SQL queries.")
    # Merge overrides into the shared config mapping (kept aliased, as
    # the original deliberately wrote defaults back into _sql_data).
    args = self._sql_data
    for key, value in kwargs.iteritems():
        args[key] = value
    have_creds = ("read_default_file" in args or "user" in args
                  or "passwd" in args)
    if not have_creds:
        args["read_default_file"] = expanduser("~/.my.cnf")
    args.setdefault("autoping", True)
    args.setdefault("autoreconnect", True)
    self._sql_conn = oursql.connect(**args)
def display(sphinx_id):
    # Render one stored source document, syntax-highlighted by Pygments
    # when a lexer exists for its filename; 404 when the id is unknown.
    db = oursql.connect(user='******', passwd='codesearch', db='codesearch')
    cursor = db.cursor(oursql.DictCursor)
    query = 'SELECT project, path, text FROM documents WHERE id = ?'
    cursor.execute(query, (sphinx_id,))
    sourcecode = cursor.fetchone()
    if sourcecode is None:
        flask.abort(404)
    title = posixpath.join(sourcecode['project'], sourcecode['path'])
    try:
        lexer = get_lexer_for_filename(sourcecode['path'])
    except ClassNotFound:
        # Syntax highlighting not supported.'
        # NOTE(review): the text is interpolated into HTML unescaped
        # here -- possible XSS unless the template/upstream escapes it;
        # confirm.
        code = u'<pre>{}</pre>'.format(sourcecode['text'])
        return flask.render_template('display.html', title=title, code=code)
    formatter = HtmlFormatter()
    # Highlighting large files can be a slow operation. This is a candidate
    # for caching.
    # The cache key is the adler32 checksum of the raw text.
    checksum = zlib.adler32(sourcecode['text'])
    key = json.dumps(['HIGHLIGHT', checksum])
    code = cache.get(key)
    if code is None:
        code = highlight(sourcecode['text'], lexer, formatter)
        cache.set(key, code)
    return flask.render_template('display.html', title=title, code=code)
def insert(): print 'Populating the page_restrictions table...' conn = oursql.connect(host=settings.db_host, user=settings.db_user, passwd=settings.db_pass, db=settings.db_name) cur = conn.cursor() cur.executemany('INSERT IGNORE INTO `page_restrictions` VALUES (?,?,?,?,?,?,?);', parse(full_gen())) conn.close()
def connect(host, port, user, passwd, dbname, servercfg):
    """Open a MySQL connection and wrap it in a MessengerDb."""
    log.debug("connecting to database %s on %s@%s" % (dbname, user, host))
    raw_conn = oursql.connect(host=host, port=port, user=user,
                              passwd=passwd, db=dbname)
    return MessengerDb(raw_conn, servercfg)
def main():
    """Open the default-config DB and process every generated month."""
    defaults = os.path.expanduser("~/.my.cnf")
    connection = oursql.connect(raise_on_warnings=False,
                                read_default_file=defaults)
    cur = connection.cursor()
    for year in gen():
        do_month(cur, year)
def count():
    """Return the approximate row count of the new_links table."""
    db = oursql.connect(db='u_sigma_webcite_p',
                        host="sql-user-l.toolserver.org",
                        read_default_file="/home/legoktm/.my.cnf")
    cursor = db.cursor()
    cursor.execute("SELECT table_name, table_rows FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA = 'u_sigma_webcite_p' AND TABLE_NAME='new_links';")
    rows = cursor.fetchall()
    total = rows[0][1]
    db.close()
    return total
def connect(self, read_default_file=DB_CONFIG_PATH):
    """Open the hashtag DB connection, logging the attempt via tlog."""
    with tlog.critical('connect'):
        self.connection = oursql.connect(
            db=HT_DB_NAME,
            host=HT_DB_HOST,
            read_default_file=read_default_file,
            charset=None,
            use_unicode=False,
            autoping=True)
def saveLuxData(self):
    """Persist the current lux reading to the database and the graph.

    Opens one connection, hands it to both save helpers, and always
    closes it (the original leaked the connection).
    """
    conn = oursql.connect(host=db["host"], port=db["port"], db=db["name"],
                          user=db["user"], passwd=db["pass"])
    try:
        self.saveLux2Database(conn)
        self.saveLux2Graph(conn)
    finally:
        conn.close()
def get_db():
    """Return the cached SQL connection, creating it on first use."""
    if not g._db:
        # Copy the mapping first: the original mutated the shared bot
        # configuration dict in place, writing the defaults back into
        # cache.bot.config.wiki on every fresh connection.
        args = dict(cache.bot.config.wiki["_copyviosSQL"])
        args["read_default_file"] = expanduser("~/.my.cnf")
        args["autoping"] = True
        args["autoreconnect"] = True
        g._db = oursql.connect(**args)
    return g._db
def __init__(self, lang='en'):
    """Open a connection to the <lang>wiki replica database."""
    self.lang = lang
    self.db_title = lang + 'wiki_p'
    self.db_host = lang + 'wiki.labsdb'
    self.connection = oursql.connect(host=self.db_host,
                                     db=self.db_title,
                                     read_default_file=DB_CONFIG_PATH,
                                     charset=None)
def ht_db_connect():
    """Connect to the hashtag database using the Flask app config."""
    cfg = app.config
    return oursql.connect(db=cfg['HT_DB_NAME'],
                          host=cfg['HT_DB_HOST'],
                          user=cfg['DB_USER'],
                          passwd=cfg['DB_PASSWORD'],
                          charset=None,
                          use_unicode=False)
def oursql_no_port_connect():
    """Connect with positional args, omitting the port (driver default)."""
    connection = oursql.connect(MYSQL_CONF['host'],
                                MYSQL_CONF['name'],
                                MYSQL_CONF['pwd'],
                                db=MYSQL_CONF['db'])
    return common_select_conn(connection, common_select_sql())
def oursql_keyword_connect():
    """Connect using keyword arguments only, then run the shared query."""
    connection = oursql.connect(host=MYSQL_CONF['host'],
                                port=MYSQL_CONF['port'],
                                user=MYSQL_CONF['name'],
                                passwd=MYSQL_CONF['pwd'],
                                db=MYSQL_CONF['db'])
    return common_select_conn(connection, common_select_sql())
def oursql_position_connect():
    """Connect with positional host/user/passwd plus explicit port."""
    connection = oursql.connect(MYSQL_CONF['host'],
                                MYSQL_CONF['name'],
                                MYSQL_CONF['pwd'],
                                port=MYSQL_CONF['port'],
                                db=MYSQL_CONF['db'])
    return common_select_conn(connection, common_select_sql())
def main(): conf = ConfigParser.ConfigParser() options, args = interface() if options.conf: conf.read(options.conf) # build our configuration if options.db: conn = oursql.connect( user=conf.get('Database','USER'), passwd=conf.get('Database','PASSWORD'), db=conf.get('Database','DATABASE') ) cur = conn.cursor() #genomes = ['hg19', 'venter', 'chinese', 'korean', 'panTro2'] #genomes = [ #'129S1_SvImJ_Mouse_Genome', #'129S5_Mouse_Genome', #'C57BL_6N_Mouse_Genome', #'CAST_Ei_Mouse_Genome', #'NOD_Mouse_Genome', #'NZO_Mouse_Genome', #'PWK_Ph_Mouse_Genome', #'Spretus_Ei_Mouse_Genome', #'WSB_Ei_Mouse_Genome', #'mm9', #'rn4' #] if options.db: try: cur.execute('''CREATE TABLE species ( name varchar(7) NOT NULL, description varchar(100) NULL, version text NULL, PRIMARY KEY (name)) ENGINE=InnoDB''') except: # need better handling here for tables that exist pass if options.readlist: for g in options.readlist: #pdb.set_trace() output_file = os.path.abspath(os.path.join(options.output, "all_probes_v_{0}.lastz".format(g))) exc_str = '''/Users/bcf/git/brant/seqcap/Alignment/run_lastz.py \ --target=/nfs/data1/genomes/Genomes/{3}{0}/{0}.2bit \ --query={1}\ --nprocs=6 \ --output={2} --huge'''.format(g, options.probefile, output_file, options.fish) os.system(exc_str) if options.db: rest_of_steps(cur, g, output_file) try: cur.execute('INSERT INTO species (name) VALUES (?)', (g,)) except oursql.IntegrityError, e: if e == 1062: pass
def main():
    """Regenerate the multi-subpage database report and save it on-wiki."""
    page = wikitools.Page(wiki, base)
    db = oursql.connect(db='wikidatawiki_p',
                        host="sql-s5",
                        read_default_file=os.path.expanduser("~/.my.cnf"),
                        charset=None,
                        use_unicode=False)
    sections = [
        'This report spans multiple subpages. It was last run at <onlyinclude>~~~~~</onlyinclude>.',
        run(db, 'P17', 'P30'),   # country w/o continent
        run(db, 'P21', 'P107'),  # sex w/o entity type
    ]
    page.edit(''.join(sections), summary='Bot: Updating database report', bot=1)
def run_query(query, query_params, lang):
    """Run *query* against the <lang>wiki replica and return all rows.

    Returns the DictCursor fetchall() result.  The cursor and the
    connection are always closed (the original leaked both on every
    call).
    """
    db_title = lang + 'wiki_p'
    db_host = lang + 'wiki.labsdb'
    connection = oursql.connect(db=db_title, host=db_host,
                                user=app.config['DB_USER'],
                                passwd=app.config['DB_PASSWORD'],
                                charset=None)
    try:
        cursor = connection.cursor(oursql.DictCursor)
        try:
            cursor.execute(query, query_params)
            ret = cursor.fetchall()
        finally:
            cursor.close()
    finally:
        connection.close()
    return ret