def mail(self):
    """Open the MySQL connection used for mail administration.

    Tries the stored MySQL password first; if that fails, retries with an
    empty password, and reports an error only if the retry also fails.
    """
    password = self.__getMySQLPassword()
    try:
        # MySQLdb.connect is the public factory; the module exposes no
        # lowercase `connection` callable.
        db = MySQLdb.connect(self.base.mysql['host'], self.base.mysql['user'], password)
    except Exception:
        # BUGFIX: the original attached two `except Exception` clauses to one
        # try, making the second (error-reporting) clause unreachable.
        # The fallback attempt is nested so its failure is actually reported.
        try:
            db = MySQLdb.connect(self.base.mysql['host'], self.base.mysql['user'], '')
        except Exception as msg:
            error_message(msg)
def mail(self):
    """Open the MySQL connection used for mail administration.

    Tries the stored MySQL password first; if that fails, retries with an
    empty password, and reports an error only if the retry also fails.
    """
    password = self.__getMySQLPassword()
    try:
        # MySQLdb.connect is the public factory; the module exposes no
        # lowercase `connection` callable.
        db = MySQLdb.connect(
            self.base.mysql['host'], self.base.mysql['user'], password
        )
    except Exception:
        # BUGFIX: the original attached two `except Exception` clauses to one
        # try, making the second (error-reporting) clause unreachable.
        # The fallback attempt is nested so its failure is actually reported.
        try:
            db = MySQLdb.connect(
                self.base.mysql['host'], self.base.mysql['user'], ''
            )
        except Exception as msg:
            error_message(msg)
def authenticate(u, p):
    """Return 1 if an enabled squid user with matching password exists, else 0.

    NOTE(review): the SQL below embeds literal (scrubbed) credentials rather
    than binding u/p as parameters; it is kept byte-identical here, but it
    should be parameterized before real use.
    """
    # Establish a connection (MySQLdb.connect is the documented factory;
    # the module has no lowercase `connection` attribute).
    db = MySQLdb.connect(host="localhost", user="******", passwd="xxx", db="xxx")
    result = 0
    # Run a MySQL query and collect the result set.
    query = """SELECT squid_user, squid_pass FROM surfwijzer_users WHERE squid_enabled='1' AND squid_user='******' AND squid_pass='******'"""
    db.query(query)
    r = db.store_result()
    # fetch_row(1) yields a tuple of at most one row.
    for row in r.fetch_row(1):
        if row[1] == p:
            result = 1
    return result
def get_connection(self):
    """Open and return a new MySQL connection built from self.params.

    BUGFIX: uses MySQLdb.connect (the module's factory) with the classic
    keyword names `passwd`/`db` — the `password`/`database` aliases used by
    the original exist only in newer forks, and the rest of this file
    consistently uses `passwd`/`db`.
    """
    conn = MySQLdb.connect(host=self.params.get("host"),
                           port=self.params.get("port"),
                           user=self.params.get("user"),
                           passwd=self.params.get("password"),
                           db=self.params.get("db"))
    return conn
def wms_balancing_arbiter():
    """Update the WMS instances published behind an alias according to the
    load metric provided by wms_balancing_metric().

    Return None if errors are raised during calculation.
    """
    import os, commands, sys, fpformat
    sys.path.append('../common')
    import time
    import datetime
    import readconf_func
    import logging
    import socket
    import MySQLdb
    import logpredef

    logger = logging.getLogger('wms_balancing_arbiter')
    conf = readconf_func.readconf()
    # Opening mysql db connection (MySQLdb.connect is the public factory).
    try:
        db = MySQLdb.connect(host=conf.get('WMSMON_DB_HOST'),
                             user=conf.get('WMSMON_DB_USER'),
                             passwd=conf.get('WMSMON_DB_PWD'),
                             db=conf.get('WMSMON_DB_NAME'))
        logger.info("Starting db connection")
    except Exception as e:
        strxx = "ERROR CONNECTING TO WMSMonitor DB: " + str(e)
        logger.error(strxx)
        logger.error("ERROR: Please check mysql daemon is running and connection parameters are correct!")
        sys.exit(1)
def host_usagetest_consumer():
    """Take usage-test results from producers of such a metric
    (1: Nagios, 2: URL of a UI) and populate the WMSMonitor database.
    """
    import os, commands, sys, fpformat
    sys.path.append('../common')
    import time
    import datetime
    import readconf_func
    import logging
    import socket
    import MySQLdb
    import urllib

    # BUGFIX: `logger` was used below without being created anywhere in this
    # function's scope (logging was imported locally, so no module logger is
    # implied) — create it explicitly.
    logger = logging.getLogger('host_usagetest_consumer')
    confvar = readconf_func.readconf()
    # CONNECTING TO DB - opening mysql db connection.
    logger.info("Starting db connection")
    try:
        db = MySQLdb.connect(host=confvar.get('WMSMON_DB_HOST'),
                             user=confvar.get('WMSMON_DB_USER'),
                             passwd=confvar.get('WMSMON_DB_PWD'),
                             db=confvar.get('WMSMON_DB_NAME'))
    except Exception as e:
        stri2 = "ERROR CONNECTING TO WMSMonitor DB: " + str(e)
        logger.error(stri2)
        logger.error("ERROR: Please check mysql daemon is running and connection parameters are correct!")
        sys.exit(1)
def __init__(self):
    """Start the PlayerAgent server (port 7001), open the DB connection,
    register packet handlers and pre-load the `dimensions` table."""
    framework.server.__init__(self, 7001, 20, framework.GlobalHandler, False)
    # MySQLdb.connect is the module's factory (no lowercase `connection`
    # attribute exists); the returned Connection keeps the low-level
    # query()/store_result() API used below.
    self.db = MySQLdb.connect(
        host="127.0.0.1",
        user="******",
        passwd="",
        db="aoc",
    )
    self.packetMgr = {
        "PlayerAgent": {
            0: onAuthenticate.onAuthenticate,
            1: onCreateCharacter.onCreateCharacter,
            6: onGetStartupData.onGetStartupData,
        }
    }
    # Retrieving dimensions from database (prevent a request each time).
    self.dimensions = {}
    self.db.query("SELECT * FROM `dimensions`")
    r = self.db.store_result()
    for f in r.fetch_row(r.num_rows(), how=1):
        self.dimensions[int(f["dimension_id"])] = f
def host_usagetest_consumer():
    """Take usage-test results from producers of such a metric
    (1: Nagios, 2: URL of a UI) and populate the WMSMonitor database.
    """
    import os, commands, sys, fpformat
    sys.path.append('../common')
    import time
    import datetime
    import readconf_func
    import logging
    import socket
    import MySQLdb
    import urllib

    # BUGFIX: `logger` was used below without being created anywhere in this
    # function's scope — create it explicitly.
    logger = logging.getLogger('host_usagetest_consumer')
    confvar = readconf_func.readconf()
    # CONNECTING TO DB - opening mysql db connection.
    logger.info("Starting db connection")
    try:
        db = MySQLdb.connect(host=confvar.get('WMSMON_DB_HOST'),
                             user=confvar.get('WMSMON_DB_USER'),
                             passwd=confvar.get('WMSMON_DB_PWD'),
                             db=confvar.get('WMSMON_DB_NAME'))
    except Exception as e:
        stri2 = "ERROR CONNECTING TO WMSMonitor DB: " + str(e)
        logger.error(stri2)
        logger.error(
            "ERROR: Please check mysql daemon is running and connection parameters are correct!"
        )
        sys.exit(1)
def update_record(table, which, who, what, where):
    """Insert a row into `newxx`.<table>; return the new row id, or 0 on error.

    The table name is interpolated directly and must come from trusted code;
    the value fields are escaped to prevent SQL injection.
    """
    connection = MySQLdb.connect(host=MYSQL_HOST_M, port=MYSQL_PORT,
                                 user=MYSQL_USER, passwd=MYSQL_PASS)
    connection.select_db(MYSQL_DB)
    try:
        # Escape caller-supplied values (the original interpolated them raw).
        esc = connection.escape_string
        connection.query("INSERT INTO `newxx`.`" + table +
                         "` ( `who`, `which`, `what`, `where`) VALUES ( '" +
                         esc(who) + "', '" + esc(which) + "', '" +
                         esc(what) + "', '" + esc(where) + "')")
        return connection.insert_id()
    except Exception:
        # Narrowed from a bare `except:`; keep the 0-on-failure contract.
        return 0
def upload_file(table,newxxid,author,filename,content): connection = MySQLdb.connection(host=MYSQL_HOST_M,port=MYSQL_PORT,user=MYSQL_USER,passwd=MYSQL_PASS) connection.select_db(MYSQL_DB) try: connection.query("INSERT INTO `newxx`.`"+table+"` ( `fileid`, `author`, `filename`, `content`) VALUES ( '"+newxxid+"', '"+author+"', '"+filename+"', '"+content+"')") return 'ok' except NameError, e: print e
def get_one_record(sql):
    """Run *sql* against MYSQL_DB and return the first row, or None if empty."""
    connection = MySQLdb.connect(host=MYSQL_HOST_M, port=MYSQL_PORT,
                                 user=MYSQL_USER, passwd=MYSQL_PASS)
    connection.select_db(MYSQL_DB)
    connection.query(sql)
    r = connection.store_result()
    row = r.fetch_row()
    if row:
        return row[0]
    return None
def __init__(self, host, user, passwd, dbname):
    """Store connection parameters and open a UTF-8 MySQL connection."""
    self.host = host
    self.user = user
    self.passwd = passwd
    self.dbname = dbname
    # MySQLdb.connect is the public factory (no lowercase `connection`).
    self.db = MySQLdb.connect(db=self.dbname, user=self.user,
                              passwd=self.passwd, host=host)
    self.db.set_character_set('utf8')
def get_user(id):
    """Look up a user row by facebook id and return a JSON 'OK' response."""
    connection = mdb.connect(host='localhost', user='******', passwd='',
                             db='fbhack', use_unicode=True, charset='utf8')
    cursor = connection.cursor()
    # BUGFIX: the original wrote `"..." (id,)`, i.e. it *called* the SQL
    # string (a TypeError); the parameters belong as execute()'s second
    # argument, which also gives driver-side escaping.
    cursor.execute("SELECT * FROM users WHERE facebook_id = %s", (id,))
    row = cursor.fetchone()
    # NOTE(review): `row` is fetched but never returned — presumably the
    # payload should include it; kept as-is to preserve the response shape.
    return jsonify(result="OK", )
def __init__(self, domain, subdomain_list, mysql_config):
    """Initialise the scan; open a MySQL connection when a config is given,
    otherwise leave self.mysql_connection as None."""
    # BUGFIX: the original called `__init` (missing trailing underscores),
    # so the parent initializer never ran — it merely looked up a
    # nonexistent attribute and raised at runtime.
    super(DomainScan, self).__init__(domain, subdomain_list)
    if mysql_config:
        # Positional order matches MySQLdb.connect: host, user, passwd, db.
        self.mysql_connection = MySQLdb.connect(mysql_config["host"],
                                                mysql_config["user"],
                                                mysql_config["pass"],
                                                mysql_config["database"])
    else:
        self.mysql_connection = None
def get_one_record(sql):
    """Run *sql* against MYSQL_DB and return the first row, or None if empty."""
    connection = MySQLdb.connect(host=MYSQL_HOST_M, port=MYSQL_PORT,
                                 user=MYSQL_USER, passwd=MYSQL_PASS)
    connection.select_db(MYSQL_DB)
    connection.query(sql)
    r = connection.store_result()
    row = r.fetch_row()
    if row:
        return row[0]
    return None
def ftp(self):
    """Verify vsftpd is installed, then open the MySQL admin connection."""
    if not fileExists(self.base.vsftpd['bin']):
        error_message('Vsftpd not installed!')
    password = self.__getMySQLPassword()
    try:
        # MySQLdb.connect is the public factory (no lowercase `connection`).
        db = MySQLdb.connect(self.base.mysql['host'], self.base.mysql['user'], password)
    except Exception as msg:
        error_message(msg)
def ftp(self):
    """Verify vsftpd is installed, then open the MySQL admin connection."""
    if not fileExists(self.base.vsftpd['bin']):
        error_message('Vsftpd not installed!')
    password = self.__getMySQLPassword()
    try:
        # MySQLdb.connect is the public factory (no lowercase `connection`).
        db = MySQLdb.connect(
            self.base.mysql['host'], self.base.mysql['user'], password
        )
    except Exception as msg:
        error_message(msg)
def get_filelist(sql):
    """Run *sql* and render every result row as an HTML line with a link."""
    connection = MySQLdb.connect(host=MYSQL_HOST_M, port=MYSQL_PORT,
                                 user=MYSQL_USER, passwd=MYSQL_PASS)
    connection.select_db(MYSQL_DB)
    connection.query(sql)
    r = connection.store_result()
    row = r.fetch_row()
    result = ""
    # fetch_row() returns an empty tuple when exhausted, ending the loop.
    while row:
        result = "%s <br/>\n<id=%s> <a href='%s'>%s</a> %s %s %s" % (
            result, row[0][0], row[0][1], row[0][1], row[0][2], row[0][3], row[0][5])
        row = r.fetch_row()
    return result
def get_raw_filelist(sql):
    """Run *sql* and render every result row as a plain-text line."""
    connection = MySQLdb.connect(host=MYSQL_HOST_M, port=MYSQL_PORT,
                                 user=MYSQL_USER, passwd=MYSQL_PASS)
    connection.select_db(MYSQL_DB)
    connection.query(sql)
    r = connection.store_result()
    row = r.fetch_row()
    result = ""
    # fetch_row() returns an empty tuple when exhausted, ending the loop.
    while row:
        result = "%s \nfilename=%s id=%s newxx=%s" % (
            result, row[0][3], row[0][0], row[0][1])
        row = r.fetch_row()
    return result
def get_record(sql):
    """Run *sql*; return "0" when no rows match, otherwise the first field
    of the first row followed by every row joined with <br/>."""
    connection = MySQLdb.connect(host=MYSQL_HOST_M, port=MYSQL_PORT,
                                 user=MYSQL_USER, passwd=MYSQL_PASS)
    connection.select_db(MYSQL_DB)
    connection.query(sql)
    r = connection.store_result()
    row = r.fetch_row()
    if row:
        result = row[0][0]
    else:
        result = "0"
    # NOTE: the first row appears twice in the output (seeded above, then
    # appended here) — preserved as original behavior.
    while row:
        result = "%s <br/> %s" % (result, row[0])
        row = r.fetch_row()
    return result
def update_record(table, which, who, what, where):
    """Insert a row into `newxx`.<table>; return the new row id, or 0 on error.

    The table name is interpolated directly and must come from trusted code;
    the value fields are escaped to prevent SQL injection.
    """
    connection = MySQLdb.connect(host=MYSQL_HOST_M, port=MYSQL_PORT,
                                 user=MYSQL_USER, passwd=MYSQL_PASS)
    connection.select_db(MYSQL_DB)
    try:
        # Escape caller-supplied values (the original interpolated them raw).
        esc = connection.escape_string
        connection.query("INSERT INTO `newxx`.`" + table +
                         "` ( `who`, `which`, `what`, `where`) VALUES ( '" +
                         esc(who) + "', '" + esc(which) + "', '" +
                         esc(what) + "', '" + esc(where) + "')")
        return connection.insert_id()
    except Exception:
        # Narrowed from a bare `except:`; keep the 0-on-failure contract.
        return 0
def retrieve_chrinfo(schema, outpath): try: print 'trying to retrieve chromosome length from ucsc (%s)...'%schema db = MySQLdb.connection(host="genome-mysql.cse.ucsc.edu",user="******",db=schema) db.query("""select chrom, size from chromInfo""") rows = db.store_result() outfile = file("%s/%s"%(outpath, schema), 'w') for row in rows.fetch_row(10000000): outfile.write('%s\t%s\n'%(row[0], row[1])) except (OperationalError, ProgrammingError): print 'The schema %s doesnt have the table chromInfo'%schema finally: db.close()
def __init__(self):
    """Start the CSPlayerAgent server (port 7002), open the DB connection
    and register packet handlers."""
    framework.server.__init__(self, 7002, 20, framework.GlobalHandler, False)
    # MySQLdb.connect is the public factory (no lowercase `connection`).
    self.db = MySQLdb.connect(
        host="127.0.0.1",
        user="******",
        passwd="",
        db="aoc",
    )
    self.packetMgr = {
        "CSPlayerAgent": {
            0: onAuthenticate.onAuthenticate,
        }
    }
def upload_file(table, newxxid, author, filename, content): connection = MySQLdb.connection(host=MYSQL_HOST_M, port=MYSQL_PORT, user=MYSQL_USER, passwd=MYSQL_PASS) connection.select_db(MYSQL_DB) try: connection.query( "INSERT INTO `newxx`.`" + table + "` ( `fileid`, `author`, `filename`, `content`) VALUES ( '" + newxxid + "', '" + author + "', '" + filename + "', '" + content + "')") return 'ok' except NameError, e: print e
def __init__(self, db=None, user="******", password="******", dbname="sahana"):
    """Connect to database, init graph.

    If a database connection is supplied in db, use that, else connect
    using the supplied or defaulted user, password, and database name.
    """
    import MySQLdb
    OrderTables.__init__(self)
    if db:
        self.db = db
    else:
        # MySQLdb.connect is the public factory (no lowercase `connection`).
        self.db = MySQLdb.connect(
            host="localhost", user=user, passwd=password, db=dbname)
def get_raw_filelist(sql):
    """Run *sql* and render every result row as a plain-text line."""
    connection = MySQLdb.connect(host=MYSQL_HOST_M, port=MYSQL_PORT,
                                 user=MYSQL_USER, passwd=MYSQL_PASS)
    connection.select_db(MYSQL_DB)
    connection.query(sql)
    r = connection.store_result()
    row = r.fetch_row()
    result = ""
    # fetch_row() returns an empty tuple when exhausted, ending the loop.
    while row:
        result = "%s \nfilename=%s id=%s newxx=%s" % (
            result, row[0][3], row[0][0], row[0][1])
        row = r.fetch_row()
    return result
def get_record(sql):
    """Run *sql*; return "0" when no rows match, otherwise the first field
    of the first row followed by every row joined with <br/>."""
    connection = MySQLdb.connect(host=MYSQL_HOST_M, port=MYSQL_PORT,
                                 user=MYSQL_USER, passwd=MYSQL_PASS)
    connection.select_db(MYSQL_DB)
    connection.query(sql)
    r = connection.store_result()
    row = r.fetch_row()
    if row:
        result = row[0][0]
    else:
        result = "0"
    # NOTE: the first row appears twice in the output (seeded above, then
    # appended here) — preserved as original behavior.
    while row:
        result = "%s <br/> %s" % (result, row[0])
        row = r.fetch_row()
    return result
def __init__(self):
    """Start the UniverseAgent server (port 7000), open the DB connection
    and register packet handlers."""
    framework.server.__init__(self, 7000, 20, framework.GlobalHandler, False)
    # MySQLdb.connect is the public factory (no lowercase `connection`).
    self.db = MySQLdb.connect(
        host="127.0.0.1",
        user="******",
        passwd="",
        db="aoc",
    )
    self.packetMgr = {
        "UniverseAgent": {
            0: onInitiateAuthentification.onInitiateAuthentification,
            1: onAnswerChallenge.onAnswerChallenge,
        }
    }
def connect():
    """ Connect to MySQL database by loading database setttings from config.ini. """
    # Load configurations
    db_config = ReadMySQLConfig()
    conn = None
    try:
        print("Connecting to MySQL database...")
        conn = mdb.connect(**db_config)
        print('connection established.')
    except mdb.Error as e:
        print(e)
    finally:
        # BUGFIX: only close when the connection was actually created —
        # the original raised UnboundLocalError here if connect() failed.
        if conn is not None:
            conn.close()
            print('connection closed.')
def __init__(self):
    """Start the UniverseAgent server (port 7000), open the DB connection
    and register packet handlers."""
    framework.server.__init__(self, 7000, 20, framework.GlobalHandler, False)
    # MySQLdb.connect is the public factory (no lowercase `connection`).
    self.db = MySQLdb.connect(
        host="127.0.0.1",
        user="******",
        passwd="",
        db="aoc",
    )
    self.packetMgr = {
        "UniverseAgent": {
            0: onInitiateAuthentification.onInitiateAuthentification,
            1: onAnswerChallenge.onAnswerChallenge,
        }
    }
def get_filelist(sql):
    """Run *sql* and render every result row as an HTML line with a link."""
    connection = MySQLdb.connect(host=MYSQL_HOST_M, port=MYSQL_PORT,
                                 user=MYSQL_USER, passwd=MYSQL_PASS)
    connection.select_db(MYSQL_DB)
    connection.query(sql)
    r = connection.store_result()
    row = r.fetch_row()
    result = ""
    # fetch_row() returns an empty tuple when exhausted, ending the loop.
    while row:
        result = "%s <br/>\n<id=%s> <a href='%s'>%s</a> %s %s %s" % (
            result, row[0][0], row[0][1], row[0][1], row[0][2], row[0][3], row[0][5])
        row = r.fetch_row()
    return result
def plot_wmslist_inALIAS(): '''plot_wmslist_inALIAS() -> utility to plot on file the list of wms in aliases defined for your site ''' fileout=open('/var/www/html/wmsmon/main/wmspoolinfo.txt','w') fileout.write('GENERAL INFO ABOUT CNAF WMS/LB INSTANCES POOL ON: ' + commands.getoutput('date')) print "Starting db connection" try: db = MySQLdb.connection(host=confvar.get('WMSMON_DB_HOST'),user=confvar.get('WMSMON_DB_USER'),passwd=confvar.get('WMSMON_DB_PWD'),db=confvar.get('WMSMON_DB_NAME')) except Exception,e: stri2= "ERROR CONNECTING TO WMSMonitor DB: " + str(e) print stri2 print "ERROR: Please check mysql daemon is running and connection parameters are correct!" sys.exit(1)
def __init__(self):
    """Start the PlayerAgent server (port 7001), open the DB connection,
    register packet handlers and pre-load the `dimensions` table."""
    framework.server.__init__(self, 7001, 20, framework.GlobalHandler, False)
    # MySQLdb.connect is the public factory (no lowercase `connection`).
    self.db = MySQLdb.connect(host="127.0.0.1", user="******", passwd="", db="aoc")
    self.packetMgr = {
        "PlayerAgent": {
            0: onAuthenticate.onAuthenticate,
            1: onCreateCharacter.onCreateCharacter,
            6: onGetStartupData.onGetStartupData,
        }
    }
    # Retrieving dimensions from database (prevent a request each time).
    self.dimensions = {}
    self.db.query("SELECT * FROM `dimensions`")
    r = self.db.store_result()
    for f in r.fetch_row(r.num_rows(), how=1):
        self.dimensions[int(f["dimension_id"])] = f
def run(self):
    """Daemon entry point: read the wmsmon config and open the WMSMonitor
    database connection."""
    # INITIALIZATION
    logger = logging.getLogger('data_collector')
    TIME_AT_START = time.time()
    logger.info('THIS IS WMSMonitor data_collector_daemon')
    logger.info('Reading wmsmon conf file')
    confvar = readconf_func.readconf()
    # CONNECTING TO DB - opening mysql db connection.
    logger.info("Starting db connection")
    try:
        db = MySQLdb.connect(host=confvar.get('WMSMON_DB_HOST'),
                             user=confvar.get('WMSMON_DB_USER'),
                             passwd=confvar.get('WMSMON_DB_PWD'),
                             db=confvar.get('WMSMON_DB_NAME'))
    except Exception as e:
        # BUGFIX: the original assigned the message to `str`, shadowing the
        # builtin (any later str() call in this scope would then fail).
        msg = "ERROR CONNECTING TO WMSMonitor DB: " + str(e)
        logger.error(msg)
        logger.error("ERROR: Please check mysql daemon is running and connection parameters are correct!")
        sys.exit(1)
def __init__(self):
    """Start the GameAgent server (port 7040), open the DB connection and
    register packet handlers."""
    framework.server.__init__(self, 7040, 20, framework.GlobalHandler, True)
    # MySQLdb.connect is the public factory (no lowercase `connection`).
    self.db = MySQLdb.connect(
        host="127.0.0.1",
        user="******",
        passwd="",
        db="aoc",
    )
    self.packetMgr = {
        "GameAgent": {
            1: onPing.onPing,
            3: onAuthenticate2.onAuthenticate2,
            5: onUpdateGameServerStats.onUpdateGameServerStats,
        },
        "GameCharAgent": {}
    }
def __init__(self):
    """Start the GameAgent server (port 7040), open the DB connection and
    register packet handlers."""
    framework.server.__init__(self, 7040, 20, framework.GlobalHandler, True)
    # MySQLdb.connect is the public factory (no lowercase `connection`).
    self.db = MySQLdb.connect(
        host="127.0.0.1",
        user="******",
        passwd="",
        db="aoc",
    )
    self.packetMgr = {
        "GameAgent": {
            1: onPing.onPing,
            3: onAuthenticate2.onAuthenticate2,
            5: onUpdateGameServerStats.onUpdateGameServerStats,
        },
        "GameCharAgent": {}
    }
def main(): schema = sys.argv[1] #get all the database names from the information schema in ucsc if schema == 'all': print 'retrieving all schema names...' try: db = MySQLdb.connection(host="genome-mysql.cse.ucsc.edu",user="******",db="information_schema") db.query("""select schema_name from schemata""") rows = db.store_result() schemas = [] for row in rows.fetch_row(10000000): schemas.extend(row) finally: db.close() #retrieve all chrinfo that exist for schema in schemas: retrieve_chrinfo(schema, os.path.dirname(__file__)) else: retrieve_chrinfo(schema, os.path.dirname(__file__))
def __init__(self, db=None, user="******", password="******", dbname="sahana"):
    """Connect to database, init graph.

    If a database connection is supplied in db, use that, else connect
    using the supplied or defaulted user, password, and database name.
    """
    import MySQLdb
    OrderTables.__init__(self)
    if db:
        self.db = db
    else:
        # MySQLdb.connect is the public factory (no lowercase `connection`).
        self.db = MySQLdb.connect(host="localhost", user=user,
                                  passwd=password, db=dbname)
def run(self):
    """Daemon entry point: read the wmsmon config and open the WMSMonitor
    database connection."""
    # INITIALIZATION
    logger = logging.getLogger('data_collector')
    TIME_AT_START = time.time()
    logger.info('THIS IS WMSMonitor data_collector_daemon')
    logger.info('Reading wmsmon conf file')
    confvar = readconf_func.readconf()
    # CONNECTING TO DB - opening mysql db connection.
    logger.info("Starting db connection")
    try:
        db = MySQLdb.connect(host=confvar.get('WMSMON_DB_HOST'),
                             user=confvar.get('WMSMON_DB_USER'),
                             passwd=confvar.get('WMSMON_DB_PWD'),
                             db=confvar.get('WMSMON_DB_NAME'))
    except Exception as e:
        # BUGFIX: the original assigned the message to `str`, shadowing the
        # builtin (any later str() call in this scope would then fail).
        msg = "ERROR CONNECTING TO WMSMonitor DB: " + str(e)
        logger.error(msg)
        logger.error(
            "ERROR: Please check mysql daemon is running and connection parameters are correct!"
        )
        sys.exit(1)
def lib(): data=request.body.read() root=ET.fromstring(data) recv_con={child.tag:child.text for child in root} d=recv['Content'] if d.startswith('d '): elif d in ['l','list']: connection=MySQLdb.connection(host=MYSQL_HOST_M, port=MYSQL_PORT, user=MYSQL_USER, passwd=MYSQL_PASS) connection.select_db(MYSQL_DB) cursor=connection.cursor() cursor.execute('SELECT * FROM MYSQL_diary') rows=[item[2] for item in cursor.fetchall()] data=''.join(rows) myxml = '''\ <xml> <ToUserName><![CDATA[{}]]></ToUserName> <FromUserName><![CDATA[{}]]></FromUserName> <CreateTime>12345678</CreateTime> <MsgType><![CDATA[text]]></MsgType> <Content><![CDATA[{}]]></Content></xml> '''.format(mydict['FromUserName'],mydict['ToUserName'],data) return myxml elif d.startswith('b '): userid = recv_con['FromUserName'] try: connection = MySQLdb.connect(host=MYSQL_HOST_M, port=MYSQL_PORT, \ user=MYSQL_USER, passwd=MYSQL_PASS, db="user_table") cursor = connection.cursor() cursor.execute('SELECT * FROM user_table') results = cursor.fetchall() except MySQLdb.Error, e: print "Error %d: %s" % (e.args[0],e.args[1]) # reply_content = "系统服务器出错!" return None # 此处应该给用户提示 finally:
#!/usr/bin/env python import cgi import MySQLdb # Following line is what you would need to create table # prior to running for the first time # # create table comment # (info text, enteredat timestamp, # cid int primary key not null auto_increment) # Connect to MySQL database db = MySQLdb.connection(host='localhost',user="******",passwd="bda",db="sw200") #db = MySQLdb.connection('host=mysql://bda:bda@localhost/sw200') # Collect values from form inputs = cgi.FieldStorage() fill = {} for key in inputs: fill[key] = inputs[key].value # Read back 10 latest comments db.query("SELECT pass FROM userid") r = db.store_result() history = "" for row in r.fetch_row(10): history += cgi.escape(row[0]) + "<br>\n" print """content-type: text/html
'Scalar Wind Speed (m/s)': ['21'], 'Wind Direction (degrees)': ['21'], 'Temperature (C)': ['24'], 'Relative Humidity (%)': ['24'], } DerivedDataTypes = {'Dew Point': ['24']} Sites = {} SurfaceMetData = '/data/realtime/CsiDatalogger/SurfaceMet/' HistoryPath = '/home/dms7/process/data/ShefProducts/24HrHistory/' RealTimePath = '/data/realtime/HydroProductsNWS/1HrShefSurfaceMet/' # try: db = MySQLdb.connection(host='host', user='******', passwd='passwd!', db='db') except: print "Error connecting to psdmeta MySQL DB" SendEmailAlert("Error connecting to psdmeta MySQL DB") sys.exit() # Query the DB to find sites with SurfaceMet Instrumentation db.query( """Select site_has_inst.SiteID, site.City, site.State, site.Latitude, site.Longitude, site.Elevation, site.NwsSiteID From site_has_inst, site, inst_manufacturer, inst_type Where inst_type.TypeID = site_has_inst.TypeID AND site_has_inst.SiteID = site.SiteID AND inst_manufacturer.ManufacturerID = inst_type.ManufacturerID AND inst_type.CategoryID IN (12,15,16,21,24)
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import os import MySQLdb HOST = os.getenv("DB_HOST") USER = os.getenv("DB_USERNAME") PASS = os.getenv("DB_PASSWORD") NAME = os.getenv("DB_NAME") if HOST != "": conn = MySQLdb.connection (host=HOST, user=USER, passwd=PASS, db=NAME) strstat = [ 'Accepted!', 'Wrong answer', 'Time limit exceeded', 'Memory limit exceeded', 'Run time error', 'Unexpected error', 'Signal #' ] def status (query, SUBID, status, test = -1, time = -1, mem = -1, score = -1): if query == 'COMPILE': if status == -1: msg = 'compiling...' elif status == 0: msg = 'successfully compiled' elif status == 124: msg = 'compile time limit exceeded' else: msg = 'compilation error'
import sha, shelve, time, Cookie, os, MySQLdb DBServer='127.0.0.1' DBUser='' DBPass='' DBName='' db = MySQLdb.connection(host=DBServer,user=DBUser,passwd=DBPass,db=DBName) DB = db.query class Session(object): def __init__(self, expires=None, cookie_path=None): string_cookie = os.environ.get('HTTP_COOKIE', '') self.cookie = Cookie.SimpleCookie() self.cookie.load(string_cookie) self.username="******" self.LI=0 if self.cookie.get('sid'): sid = self.cookie['sid'].value #Check for existing state: DB("SELECT Quser,Qrole FROM QUsers WHERE Qcookie='{0}' LIMIT 1".format(sid)) r=db.store_result() r=r.fetch_row() if len(r)>0: self.username=r[0][0] self.LI=1 # Clear session cookie from other cookies self.cookie.clear() else:
import subprocess import MySQLdb # If possible, read the password for root from /root/.my.cnf config = "/root/.my.cnf" if os.access(config, os.R_OK): f = open(config, "r") lines = f.readlines() f.close() for line in lines: findstring = "password="******"").strip() # DB1 is the db to sync to (Test or new Live) db1 = MySQLdb.connection(host="localhost", user=user, passwd=passwd, db=new_db) # DB2 is the db to sync from (backup of Live) db2 = MySQLdb.connection(host="localhost", user=user, passwd=passwd, db=old_db) def tablelist(db): db.query("SHOW TABLES;") r = db.store_result() tables = [] for row in r.fetch_row(300): tables.append(row[0]) return tables # Dict to load up the database Structure def tablestruct(db): tablestruct = {}
print "Saved %s" % p else: print "For some reason, isfile(%s) returned True" % p # Open file and read fcount = 1 post_xml = "<post image=\"%s\" post_date=\"%s\" last_updated=\"%s\" url=\"%s\" num-comments=\"%s\" blogger=\"%s\"><post_title><![CDATA[%s]]></post_title><post_blurb><![CDATA[%s]]></post_blurb><post_content><![CDATA[%s]]></post_content></post>" # Select post IDs # Demo: tag:blogger.com,1999:blog-32316390.post-1712101221610946611 from posts.blogger_id dbuser = '' dbpass = '' dbbase = '' db = sql.connection(host="localhost", user=dbuser, passwd=dbpass, db=dbbase) db.query("SELECT last_updated, `blogger_id` FROM posts") r = db.store_result() current_posts = {} if 'test' in sys.argv: sql_table = 'posts_test' do_media = False save_file = 'actual/probably_total_test.sql' else: for s in r.fetch_row(maxrows=0): current_posts[s[1]] = datetime.strptime( s[0], '%Y-%m-%d %H:%M:%S') # 0 = updated, 1 = blogger_id sql_table = 'posts' do_media = True save_file = 'actual/probably_total.sql'
def lb_query(lbhost,STARTDATE,ENDDATE,DBTYPE): #Initializing logger import logging logger = logging.getLogger('lb_query') confvar = readconf_func.readconf(); users_stats = [] # Establish a connection if DBTYPE == 'LBPROXY': lbhost = confvar['LBPROXY_DB_HOST'] dbuser = confvar['LBPROXY_DB_USER'] dbname = confvar['LBPROXY_DB_NAME'] elif DBTYPE == 'LBSERVER': lbhost = confvar['LB_DB_HOST'] dbuser = confvar['LB_DB_USER'] dbname = confvar['LB_DB_NAME'] logger.info('Establishing a connection with mysql DB') db = MySQLdb.connection(host = lbhost , user = dbuser , db = dbname, passwd = confvar['SERVER_MYSQL_PASSWORD'][1:-1]) ################ MAIN DATA CONTAINER LIST INITIALIZATION ###### wmsdata_list = [] ############################################################### def put_into_wmsdata(wmsdata_list,wmshostname,userdn,fieldlist,valuelist): wmsFOUND = False for wmsdata in wmsdata_list: if wmsdata.host == wmshostname: wmsFOUND = True try: wmsdata.add_user(userdn) except wmsdata_class.UserPresent: # logger.warning('User Already present in wmdata for host: ' + wmsdata.host) for field in fieldlist: wmsdata[userdn][field] = valuelist[fieldlist.index(field)] if not wmsFOUND: wmsdata = wmsdata_class.wmsdata(wmshostname) wmsdata.add_user(userdn) for field in fieldlist: wmsdata[userdn][field] = valuelist[fieldlist.index(field)] wmsdata_list.append(wmsdata) # Run a MySQL query to find the number of single jobs submitted in a given time interval PER USER and PER WMS logger.info('Running a MySQL query to find the number of single jobs submitted in a given time interval PER USER and PER WMS') querystr = "select users.cert_subj,host,COUNT(DISTINCT(events.jobid)) from events,short_fields inner join users on events.userid=users.userid where events.event=short_fields.event and code='17' and time_stamp>'" + STARTDATE + "' and time_stamp <='" + ENDDATE + "' and events.jobid=short_fields.jobid and name='NSUBJOBS' and value='0' group by users.cert_subj,host;" logger.info('Query is : ' + 
querystr) db.query(querystr) r = db.store_result() # Iterate through the result set WMP_in = 0 if r: for i in range(1,r.num_rows() + 1): row = r.fetch_row() # logger.debug('FOUND ROW: ' + row ) if row: dn = row[0][0] rowhost = row[0][1] rowWMP_in = row[0][2] put_into_wmsdata(wmsdata_list,rowhost,dn,['WMP_in'],[rowWMP_in]) ###################################################################################################################### ### We decided to take anymore the avg and the std of nodes per collection because they are not summable on more lb ### WHat we do is to take PER USER the total number of jobs in collection, the min and max of nodes per collection ### This are summable and avg calculation can be done on collector side ### Anyway we sum over user on sensors side and we return alse the total number of jobs per collection, min and max of nodes PER WMS ### Summing over wmsdata data will be done at the end of this function ore on the wrapper if the wmsdata_list is returned ########################################################################################################################## # Run a query to find per user and per host the number of collection, the total number of nodes in collection the min and max of nodes per collection logger.info('Running a query to find per user and per host the number of collection, the total number of nodes in collection the min and max of nodes per collection') querystr = "select users.cert_subj, host, COUNT(value), sum(value), min(value),max(value) from events,short_fields inner join users on events.userid=users.userid where events.event=short_fields.event and code='17' and time_stamp>'" + STARTDATE + "' and time_stamp <='" + ENDDATE + "' and events.jobid=short_fields.jobid and name='NSUBJOBS' and short_fields.event='0' and value>'0' group by users.cert_subj,host" logger.info('Query is : ' + querystr) db.query(querystr) r = db.store_result() # Iterate through the result set if r: for i in 
range(1,r.num_rows() + 1): row = r.fetch_row() if row: dn = row[0][0] rowhost = row[0][1] rowWMP_in_col = row[0][2] rowWMP_in_col_nodes = row[0][3] rowWMP_in_col_min_nodes = row[0][4] rowWMP_in_col_max_nodes = row[0][5] put_into_wmsdata(wmsdata_list,rowhost,dn,['WMP_in_col','WMP_in_col_nodes','WMP_in_col_min_nodes','WMP_in_col_max_nodes'],[rowWMP_in_col,rowWMP_in_col_nodes,rowWMP_in_col_min_nodes,rowWMP_in_col_max_nodes]) # Run a query to find PER USER and PER WMS the number of jobs enqued to WM from WMP in a given time interval logger.info("Run a query to find PER USER and PER WMS the number of jobs enqued to WM from WMP in a given time interval") querystr = "select users.cert_subj, host, COUNT(events.jobid) from events,short_fields inner join users on events.userid=users.userid where events.event=short_fields.event and code='4' and time_stamp >'" + STARTDATE + "' and time_stamp <='" + ENDDATE + "' and events.jobid=short_fields.jobid and events.event=short_fields.event and prog='NetworkServer' and name='RESULT' and value='OK' group by users.cert_subj,host;" logger.info('Query is : ' + querystr) db.query(querystr) r = db.store_result() if r: for i in range(1,r.num_rows() + 1): row = r.fetch_row() if row: dn = row[0][0] rowhost = row[0][1] rowWM_in = row[0][2] put_into_wmsdata(wmsdata_list,rowhost,dn,['WM_in'],[rowWM_in]) # Run a MySQL query to find the number both collection and single jobs enqueued to WM in a given time interval from LogMonitor (i.e. Resubmitted) logger.info('Run a MySQL query to find the number both collection and single jobs enqueued to WM in a given time interval from LogMonitor (i.e. 
Resubmitted) PER USER and PER WMS') querystr="select users.cert_subj,host,COUNT(DISTINCT(events.jobid)) from events,short_fields inner join users on events.userid=users.userid where code='4' and time_stamp >'" + STARTDATE + "' and time_stamp <='" + ENDDATE + "' and events.jobid=short_fields.jobid and events.event=short_fields.event and name='RESULT' and value='OK' and prog='LogMonitor' group by users.cert_subj, host;" logger.info('Query is : ' + querystr) db.query(querystr) r = db.store_result() # Iterate through the result set if r: for i in range(1,r.num_rows() + 1): row = r.fetch_row() if row: usernew = row[0][0] index = row[0][0].find('/CN=proxy/CN=proxy') if index != -1: usernew=row[0][0][0:index] dn = usernew rowhost = row[0][1] rowWM_in_res = row[0][2] put_into_wmsdata(wmsdata_list,rowhost,dn,['WM_in_res'],[rowWM_in_res]) # Run a MySQL query to find the number single jobs enqueued to Job Controller from WM in a given time interval PER WMS and PER USER logger.info('Run a MySQL query to find the number single jobs enqueued to Job Controller from WM in a given time interval per USER and PER WMS') querystr="select users.cert_subj,host,COUNT(DISTINCT(events.jobid)) from events,short_fields inner join users on events.userid=users.userid where code='4' and time_stamp >'" + STARTDATE + "' and time_stamp <='" + ENDDATE + "' and events.jobid=short_fields.jobid and events.event=short_fields.event and name='RESULT' and value='OK' and prog='WorkloadManager' group by users.cert_subj,host;" logger.info('Query is : ' + querystr) db.query(querystr) r = db.store_result() # Iterate through the result set if r: for i in range(1,r.num_rows() + 1): row = r.fetch_row() if row: usernew = row[0][0] index = row[0][0].find('/CN=proxy/CN=proxy') if index != -1: usernew=row[0][0][0:index] dn = usernew rowhost = row[0][1] rowJC_in = row[0][2] put_into_wmsdata(wmsdata_list,rowhost,dn,['JC_in'],[rowJC_in]) # Run a MySQL query to find the number single jobs enqueued to Condor from Job 
Controller in a given time interval PER USER and PER WMS logger.info('Run a MySQL query to find the number single jobs enqueued to Condor from Job Controller in a given time interval PER USER and PER WMS') querystr="select users.cert_subj,host,COUNT(DISTINCT(events.jobid)) from events,short_fields inner join users on events.userid=users.userid where code='1' and time_stamp >'" + STARTDATE + "' and time_stamp <='" + ENDDATE + "' and events.jobid=short_fields.jobid and events.event=short_fields.event and name='RESULT' and value='OK' and prog='JobController' group by users.cert_subj,host;" logger.info('Query is : ' + querystr) db.query(querystr) r = db.store_result() # Iterate through the result set if r: for i in range(1,r.num_rows() + 1): row = r.fetch_row() if row: usernew = row[0][0] index = row[0][0].find('/CN=proxy/CN=proxy') if index != -1: usernew=row[0][0][0:index] dn = usernew rowhost = row[0][1] rowJC_out = row[0][2] put_into_wmsdata(wmsdata_list,rowhost,dn,['JC_out'],[rowJC_out]) # Run a MySQL query to find the number of jobs done in a given time interval PER USER and PER WMS logger.info('Run a MySQL query to find the number single jobs done successfully in a given time interval PER USER and PER WMS') querystr="select users.cert_subj,host,COUNT(DISTINCT(events.jobid)) from events,short_fields inner join users on events.userid=users.userid where events.jobid=short_fields.jobid and code='10' and time_stamp >'" + STARTDATE + "' and time_stamp <='" + ENDDATE + "' and prog='LogMonitor' and name='REASON' and (value='Job terminated successfully' or value='Job Terminated Successfully') group by users.cert_subj,host;" logger.info('Query is : ' + querystr) db.query(querystr) r = db.store_result() # Iterate through the result set if r: for i in range(1,r.num_rows() + 1): row = r.fetch_row() if row: usernew = row[0][0] index = row[0][0].find('/CN=proxy/CN=proxy') if index != -1: usernew=row[0][0][0:index] dn = usernew rowhost = row[0][1] rowJOB_DONE = row[0][2] 
put_into_wmsdata(wmsdata_list,rowhost,dn,['JOB_DONE'],[rowJOB_DONE]) # Run a MySQL query to find the number of jobs aborted in a given time interval PER USER and PER WMS logger.info('Run a MySQL query to find the number single jobs aborted in a given time interval PER USER and PER WMS') querystr="select users.cert_subj,host,COUNT(DISTINCT(events.jobid)) from events inner join users on events.userid=users.userid where code='12' and time_stamp >'" + STARTDATE + "' and time_stamp <='" + ENDDATE + "' group by users.cert_subj,host;" logger.info('Query is : ' + querystr) db.query(querystr) r = db.store_result() # Iterate through the result set if r: for i in range(1,r.num_rows() + 1): row = r.fetch_row() if row: usernew = row[0][0] index = row[0][0].find('/CN=proxy/CN=proxy') if index != -1: usernew=row[0][0][0:index] dn = usernew rowhost = row[0][1] rowJOB_ABORTED = row[0][2] put_into_wmsdata(wmsdata_list,rowhost,dn,['JOB_ABORTED'],[rowJOB_ABORTED]) # Run a MySQL query to find the DEST_CE of jobs in a given time interval PER WMS logger.info('Run a MySQL query to find DEST_CE of jobs in a given time interval PER WMS') ##### old ce query - this double counts ce for jobs landed onto cream ce #querystr="select value, host, COUNT(value) from (select DISTINCT(short_fields.event),events.jobid, short_fields.value, host from events,short_fields where events.jobid=short_fields.jobid and time_stamp >'" + STARTDATE + "' and time_stamp <='" + ENDDATE + "' and prog='WorkloadManager' and name='DEST_HOST' and value!='localhost' and value!='unavailable' and code='15') as temp group by value, host;" ################################################## ##### New query not to double counting ce for jobs landed onto cream ce querystr="select value,host, count(value) from (select distinct(short_fields.jobid), value, host from short_fields inner join events where events.code='15' and events.prog = 'WorkloadManager' and name='DEST_HOST' and time_stamp > '" + STARTDATE + "' and time_stamp <='" + 
ENDDATE + "' and value!='localhost' and value!='unavailable' and events.jobid=short_fields.jobid) as temp group by value, host;" ################################################## logger.info('Query is : ' + querystr) db.query(querystr) r = db.store_result() # Iterate through the result set if r: for i in range(1,r.num_rows() + 1): row = r.fetch_row() if row: rowCE = row[0][0] rowhost = row[0][1] rowCEcount = row[0][2] wmsFOUND = False for wmsdata in wmsdata_list: if wmsdata.host == rowhost: wmsFOUND = True try: wmsdata.add_ce(rowCE) wmsdata.add_ce_count(rowCE,rowCEcount) except wmsdata_class.CEPresent: # logger.warning('User Already present in wmdata for host: ' + wmsdata.host) wmsdata.add_CE_count(rowCEcount) if not wmsFOUND: wmsdata = wmsdata_class.wmsdata(rowhost) wmsdata.add_ce(rowCE) wmsdata.add_ce_count(rowCE,rowCEcount) wmsdata_list.append(wmsdata) # Run a MySQL query to find the LB used to store the jobs in a given time interval # Available only if DBTYPE = LBPROXY if DBTYPE == 'LBPROXY': logger.info('Run a MySQL query to find the LB used to store the jobs in a given time interval') querystr="select distinct dg_jobid from jobs inner join events on jobs.jobid=events.jobid where events.code = '17' and time_stamp > '" + STARTDATE + "' and time_stamp < '" + ENDDATE + "';" logger.info('Query is : ' + querystr) db.query(querystr) r = db.store_result() # Iterate through the result set if r: for i in range(1,r.num_rows() + 1): row = r.fetch_row() if row: rowLB = row[0][0] LBstr = LBstr = rowLB[rowLB.find('//') + 2 : rowLB.find(':9000') ] for wmsdata in wmsdata_list: wmsdata.add_lb(LBstr) db.close() # filename= confvar['INSTALL_PATH'] +'/sensors//tmp/USERSTATS_' + lbhost + '_' + wmshost + '.txt' # fileusersstats = open(filename,'w') # fileusersstats.write('START OF FILE\n') # for i in range(0,len(users_stats)): # fileusersstats.write(str(users_stats[i][0]) + '|' + str(users_stats[i][1]) + '|' + str(users_stats[i][2]) + '|' + str(users_stats[i][3]) + '|' + 
str(users_stats[i][4]) + '|' + str(users_stats[i][5]) + '|' + str(users_stats[i][6]) + '|' + str(users_stats[i][7]) + '|' + str(users_stats[i][8]) + '|\n') # fileusersstats.write('END OF FILE\n') # fileusersstats.close() return wmsdata_list
# NOTE(review): collapsed one-line excerpt of a DB-sync script (kept byte-identical).
# It was damaged by secret redaction: `findstring = "password="******"").strip()` is
# not valid Python — the original presumably read the root password out of
# /root/.my.cnf line by line — and the assignments of `user`, `passwd`, `new_db`
# and `old_db` were lost with it. Reconstruct from the un-redacted source before use.
# `tablelist(db)` uses the low-level query()/store_result() API and returns at most
# 300 table names (fetch_row(300)).
import subprocess import MySQLdb # If possible, read the password for root from /root/.my.cnf config = "/root/.my.cnf" if os.access(config, os.R_OK): f = open(config, "r") lines = f.readlines() f.close() for line in lines: findstring = "password="******"").strip() # DB1 is the db to sync to (Test or new Live) db1 = MySQLdb.connection(host="localhost", user=user, passwd=passwd, db=new_db) # DB2 is the db to sync from (backup of Live) db2 = MySQLdb.connection(host="localhost", user=user, passwd=passwd, db=old_db) def tablelist(db): db.query("SHOW TABLES;") r = db.store_result() tables = [] for row in r.fetch_row(300): tables.append(row[0]) return tables # Dict to load up the database Structure
# NOTE(review): excerpt of a CGI guestbook/comment-board script (kept byte-identical).
# It begins mid-docstring and ends inside a triple-quoted (disabled) insert block, so
# it is not runnable as shown. A plaintext DB password is hard-coded in the
# MySQLdb.connection(...) call — move it to configuration. The quoted insert builds
# SQL by string concatenation from form input (injection risk despite the quote
# escaping) — use a parameterized query if it is ever re-enabled.
Note - need to moderate live public boards of this type! """ import cgi import MySQLdb # Following line is what you would need to create table # prior to running for the first time # # create table comment # (info text, enteredat timestamp, # cid int primary key not null auto_increment) # Connect to MySQL database db = MySQLdb.connection(host="localhost",user="******",passwd="Tulsi@991",db="searchdb") # Collect values from form inputs = cgi.FieldStorage() fill = {} for key in inputs: fill[key] = inputs[key].value # If the form was completed, save what was entered on it """try: said = fill["info"] form = 1 db.query('insert into comment (info) values ("' \ +said.replace('"',r'\"') \ +'")') except:
# NOTE(review): collapsed excerpt of the WMSMonitor data-collector daemon (kept
# byte-identical — the excerpt is truncated mid-run(), so a restyle is unsafe).
# run() connects to MySQL (Python-2 `except Exception, e` syntax), loads the
# registered WMS host list into host_vo_dict, verifies ACTIVEMQ_MSGPATH exists,
# then loops over message files in that directory.
# Known wart kept as-is: `str = "ERROR ..." + str(e)` rebinds the builtin `str`
# (the RHS call still works because it is evaluated first) — rename the local.
class MyDaemon(Daemon): #def __init__(self, pidfile, stdin='/tmp/WMSMonitor_data_collector.err', stdout='/tmp/WMSMonitor_data_collector.err', stderr='/tmp/WMSMonitor_data_collector.err'): def __init__(self, pidfile, stdin='/dev/null', stdout='/dev/null', stderr='/dev/null'): self.stdin = stdin self.stdout = stdout self.stderr = stderr self.pidfile = pidfile def run(self): #INIZIALIZATION logger = logging.getLogger('data_collector') TIME_AT_START = time.time() logger.info('THIS IS WMSMonitor data_collector_daemon') logger.info('Reading wmsmon conf file') confvar = readconf_func.readconf() #CONNECTING TO DB #Opening myslq db connection logger.info("Starting db connection") try: db = MySQLdb.connection(host=confvar.get('WMSMON_DB_HOST'), user=confvar.get('WMSMON_DB_USER'), passwd=confvar.get('WMSMON_DB_PWD'), db=confvar.get('WMSMON_DB_NAME')) except Exception, e: str = "ERROR CONNECTING TO WMSMonitor DB: " + str(e) logger.error(str) logger.error( "ERROR: Please check mysql daemon is running and connection parameters are correct!" ) sys.exit(1) #READING wms list from WMSMonitor DB querystr = "select hostname,hosts.idhost,vo,service from admin_host_labels inner join hosts on hosts.idhost=admin_host_labels.idhost where admin_host_labels.active='1';" logger.info('READING wms list from WMSMonitor DB') logger.info(querystr) db.query(querystr) r = db.store_result() row = r.fetch_row(10000) host_vo_dict = {} if len(row) > 0: for line in row: #host_vo_dict[hostname]=[idhost,vo,service] host_vo_dict[line[0]] = line[1:] #CHECKING ACTIVEMQ MSG PATH if (os.access(confvar.get('ACTIVEMQ_MSGPATH'), os.F_OK) == False): logger.error('NOT EXISTING DIRECTORY: ' + confvar.get('ACTIVEMQ_MSGPATH') + '. 
Please check wmsmon_defaults configuration file\n') sys.exit(1) logger.info('CLOSING DB CONNECTION') db.close() #Starting daemon while True: #Checking for new DATA Messages list1 = os.listdir(confvar.get('ACTIVEMQ_MSGPATH')) if len(list1) == 0: continue logger.info("Starting db connection") try: db = MySQLdb.connection(host=confvar.get('WMSMON_DB_HOST'), user=confvar.get('WMSMON_DB_USER'), passwd=confvar.get('WMSMON_DB_PWD'), db=confvar.get('WMSMON_DB_NAME')) except Exception, e: str = "ERROR CONNECTING TO WMSMonitor DB: " + str(e) logger.error(str) logger.error( "ERROR: Please check mysql daemon is running and connection parameters are correct!" ) sys.exit(1) for msg in list1: wmsflag = 0 lbflag = 0 if ((os.access( confvar.get('ACTIVEMQ_MSGPATH') + '/' + msg, os.F_OK) == True) and (os.path.getsize( confvar.get('ACTIVEMQ_MSGPATH') + '/' + msg) > 0)): #ACCESSING ACTIVEMQ MSG FILE logger.info('Working on file: ' + msg) msghdl = open( confvar.get('ACTIVEMQ_MSGPATH') + '/' + msg, 'r') lines = msghdl.readlines() #INITIALIZING MSG HOST VARIABLES hostname = msg.split('_')[0] line = lines[4] #MESSAGE CHECKS: Is message from registered Instance? 
# NOTE(review): per-message processing — messages from hosts missing from
# host_vo_dict are deleted and skipped; the collection timestamp is parsed from
# the 'DATA COLLECTION COMPLETED ON:' line; 'WMS-SENSOR' messages are loaded
# into a collectorwms object and stored to the DB section by section.
if hostname not in host_vo_dict.keys(): #CHECKING WHETHER WMS IS REGISTERED logger.debug( 'FOUND MSG FROM HOSTNAME NOT REGISTERED IN DB: ' + hostname) msghdl.close() #removing msg file to old os.system('rm -f ' + confvar.get('ACTIVEMQ_MSGPATH') + '/' + msg) continue msgepochtime = [ line.split()[7].strip() for line in lines if line.startswith('DATA COLLECTION COMPLETED ON:') ][0] try: logger.info('DATA COLLECTED ON:' + msgepochtime) except: logger.error( 'ERROR: could not read time of data collection in msg: ' + msg) msghdl.close() msgepochtime = 0 continue msghdl.seek(0) #TREATING WMS and LB data sources separately if msg.find('WMS-SENSOR') > 0: #MSG IS FROM A WMS SERVICE ############################################### wmsflag = 1 logger.info('THIS IS A WMS SERVER') #DATA HANDLING hostname_obj = collector_wms_class.collectorwms( hostname) hostname_obj.VO = host_vo_dict[hostname_obj.host][1] hostname_obj.idhost = host_vo_dict[ hostname_obj.host][0] hostname_obj.STARTDATE = [ line.split('=')[1].strip() for line in lines if line.startswith('STARTDATE =') ][0] hostname_obj.ENDDATE = [ line.split('=')[1].strip() for line in lines if line.startswith('ENDDATE =') ][0] try: logger.info('Loading wms values from file: ' + msg) hostname_obj.load_wmsdata_file(msghdl) logger.info('Storing wms values into database') hostname_obj.store_wms_to_db(db) except Exception, e: logger.error( "ERROR Loading wms values from file / Storing to db:" + e) try: logger.info( 'Loading user job data values from file: ' + msg) hostname_obj.load_user(msghdl) logger.info( 'Storing user mapping values into database') hostname_obj.store_user_to_db(db) except Exception, e: logger.error( "ERROR Loading user job data values from file / Storing to db::" + e) try: logger.info( 'Loading wms rate data values from file: ' + msg) hostname_obj.load_wmsratedata_file(msghdl) logger.info( 'Storing wms rate values into database') hostname_obj.store_wmsratedata_to_db(db) except Exception, e: logger.error( "ERROR 
Loading wmsratedata values from file / Storing to db:" + e) try: logger.info('Loading CE_MM values from file: ' + msg) hostname_obj.load_ce_mm_dict(msghdl) logger.info('Storing CE_MM values into database') hostname_obj.store_cemm_to_db(db) except Exception, e: logger.error( "ERROR Loading CE_MM values from file / Storing to db:" + e)
import MySQLdb db = MySQLdb.connection(host="localhost", user="******", passwd="password", db="spaza_shop") db.query("SUM(Qty) AS TotalQty, Product_Id, Name from Sales s join Products p ON s.Product_Id = p.Id GROUP BY Name ORDER BY SUM(Qty) DESC LIMIT 0,1") r = db.store_result() for row in r.fetch_row(): print row
# NOTE(review): collapsed excerpt (kept byte-identical). It loads posts.blogger_id
# values of the form "tag:blogger.com,1999:blog-NNN.post-NNN" and extracts the
# trailing numeric post ID by splitting on '-' (the inline [todo] notes that
# non-Blogger rows would not split this way and should be skipped). The excerpt is
# cut off immediately after the `def get_feed(post_id):` header, so the function
# body is missing — left as-is rather than guessed at.
import urllib2 import sys import time import os import MySQLdb as sql # Select post IDs # Demo: tag:blogger.com,1999:blog-32316390.post-1712101221610946611 from posts.blogger_id db = sql.connection(host="localhost", user="******", passwd="a", db="rugbydump") db.query("SELECT blogger_id, post_title FROM posts") r = db.store_result() posts = [] for row in r.fetch_row(maxrows=0): _Id = row[0] # [todo] skip if its a normal non-blogger comment (at least for testing) so it doesn't f'up, it won't be able to split _Id = _Id.split('-')[2:][0] posts.append(_Id) # proper_url = 'http://rugbydump.blogspot.com/feeds/%s/comments/default?max-results=600' whatisthisvar = 1 failed = [] def get_feed(post_id):
#!/usr/bin/python
# Batch-convert queued media files to FLV with ffmpeg.
# Reads pending jobs (convert_id, input path, output path) from the
# mmg_media_files_convert table and shells out to ffmpeg for each one.
import MySQLdb
import os

db = MySQLdb.connection(user='******', passwd='metallurg', db='mmg')
db.query("""SELECT convert_id, input, output FROM mmg_media_files_convert""")
result = db.store_result()
# Fetch up to 1000 queued jobs. The loop variable was previously named `file`,
# which shadowed the Python 2 builtin of the same name.
for job in result.fetch_row(1000):
    convert_id, src, dst = job[0], job[1], job[2]
    print("""Converting from %s to %s""" % (src, dst))
    print("""ffmpeg -i %s -f flv -y -ar 44100 %s""" % (src, dst))
    # SECURITY: file paths are interpolated into a shell command line; a path
    # containing a single quote breaks out of the quoting. Prefer
    # subprocess.call([...]) with an argument list if paths are untrusted.
    os.system(
        """ffmpeg -i '%s' -bufsize 128000000 -b 700000 -f flv -y -ar 44100 '%s'"""
        % (src, dst))
    # Alternative encoder, intentionally disabled:
    # os.system("""mencoder %s -ofps 25 -o %s -of lavf -oac mp3lame -lameopts abr:br=64 -srate 22050 -ovc lavc -lavcopts vcodec=flv:keyint=50:vbitrate=300:mbd=2:mv0:trell:v4mv:cbp:last_pred=3 -vf scale=320:240""" % (src, dst))
    print("""Converted %s""" % (src))
    # Cleanup of finished jobs, intentionally disabled:
    # db.query("""DELETE FROM mmg_media_files_convert WHERE convert_id = %s""" % (convert_id))
    # os.unlink(src)
# NOTE(review): SEMS/ivr account-balance dialog excerpt (kept byte-identical — the
# class body may continue beyond this excerpt). The SQL in onSessionStart was
# damaged by secret redaction: "...where user='******'" has lost its %s
# placeholder, so `% self.dialog.user` would raise TypeError ("not all arguments
# converted"). Restore "user='%s'" — and note this builds SQL by string
# interpolation of the caller's user id (injection risk); escape or parameterize.
# set TTS=y in apps/ivr/Makefile.defs, or rewrite with # enqueuing the parts of the sentence, like # self.ttsfile1 = IvrAudioFile() # self.ttsfile1.open("your_account_balance.wav", AUDIO_READ) # self.enqueue(self.ttsfile1, None) # self.ttsfile2 = IvrAudioFile() # self.ttsfile2.open("%i.wav"%int(res[0][0]), AUDIO_READ) # self.enqueue(self.ttsfile2, None) # etc... # from log import * from ivr import * import MySQLdb db = MySQLdb.connection(host="127.0.0.1", user="******", passwd="sa07", db="business") # or, when using config file db_balance.conf: #db = MySQLdb.connection(host=config["db_host"], user=config["db_user"], passwd=config["db_pwd"], db=config["db_db"]) class IvrDialog(IvrDialogBase) : ttsfile = None def onSessionStart(self) : db.query("select bal_int, bal_decimal from accounts where user='******'" % self.dialog.user) r = db.store_result() res = r.fetch_row(1) if len(res): self.ttsfile = IvrAudioFile().tts("Your account balance is %i dollars and %i cents" % \ (int(res[0][0]), int(res[0][1]))) else: self.ttsfile = IvrAudioFile().tts("Sorry, I do not know your account balance.")