def connect(self):
    """Open a new pooled MySQL connection using this object's stored settings."""
    return PySQLPool.getNewConnection(
        username=self._db_user,
        password=self._db_pass,
        host=self._db_host,
        db=self._db_name,
        charset='utf8',
        commitOnEnd=self.commitOnEnd,
    )
def manytasks(sas):
    """Start worker threads that each run ``checksamples`` over one shared pooled connection."""
    shared_conn = PySQLPool.getNewConnection(username='******', password='******',
                                             host='localhost', db='sandyfiles')
    for worker_id in range(2):
        worker = Thread(target=checksamples, args=(worker_id, shared_conn,))
        worker.start()
def set_conninfo(conn_info, max_pool_count=3):
    """Build a pooled connection from *conn_info* and cap the shared pool size.

    conn_info must supply "hostname", "username", "password" and "schema".
    Returns the new connection object.
    """
    conn = PySQLPool.getNewConnection(host=conn_info["hostname"],
                                      username=conn_info["username"],
                                      password=conn_info["password"],
                                      schema=conn_info["schema"])
    PySQLPool.getNewPool().maxActiveConnections = max_pool_count
    return conn
def testQuickConnectionCreation(self):
    """Quick Connection Creation"""
    try:
        connection = PySQLPool.getNewConnection(host=self.host, user=self.username,
                                                passwd=self.password, db=self.db)
    # `except Exception as e` replaces the Python-2-only comma form;
    # the `as` syntax works on Python 2.6+ and 3.x.
    except Exception as e:
        self.fail("Failed to create connection with error: " + str(e))
def _dbConnect(self):
    """Open the pooled database connection; call die() if it cannot be opened.

    Reads self.username / self.password / self.host / self.db.
    """
    try:
        self.connection = PySQLPool.getNewConnection(
            username=self.username,
            password=self.password,
            host=self.host,
            db=self.db)
    # A bare `except:` also swallows SystemExit/KeyboardInterrupt;
    # narrow it to Exception while keeping the same failure path.
    except Exception:
        # self.logger.error('database connection failed')
        die('database connection failed')
def testQuickDictConnectionCreation(self):
    """Quick Connection Creation using Kargs/Dict"""
    try:
        connection = PySQLPool.getNewConnection(**self.connDict)
        self.assertTrue(
            isinstance(connection, PySQLPool.connection.Connection))
    # Python-2-only `except Exception, e` replaced with the portable form.
    except Exception as e:
        self.fail("Failed to create connection with error: " + str(e))
def set_conninfo(conn_info, max_pool_count=3):
    """Return a pooled connection built from the *conn_info* mapping.

    Also limits the shared pool to *max_pool_count* active connections.
    """
    new_connection = PySQLPool.getNewConnection(
        host=conn_info["hostname"],
        username=conn_info["username"],
        password=conn_info["password"],
        schema=conn_info["schema"],
    )
    PySQLPool.getNewPool().maxActiveConnections = max_pool_count
    return new_connection
def TestConnect(sAddr, nPort, sUser, sPasswd):
    """Probe a MySQL server by reading the mysql.user table.

    Returns (True, '成功') on success, or (False, exception) on failure.
    """
    try:
        testConn = PySQLPool.getNewConnection(username=sUser, password=sPasswd,
                                              host=sAddr, port=nPort,
                                              db='mysql', charset='utf8')
        query = PySQLPool.getNewQuery(testConn)
        # NOTE(review): PySQLPool's documented method is Query(); confirm this
        # lowercase query() exists in the PySQLPool version in use.
        query.query(r'select * from user')
        return True, '成功'
    # Py2-only `except Exception, e` and `print e` replaced with forms that
    # run on both Python 2.6+ and 3.x without changing output.
    except Exception as e:
        print(e)
        return False, e
def testQuickConnectionCreation(self):
    """Quick Connection Creation"""
    try:
        connection = PySQLPool.getNewConnection(
            host=self.host,
            user=self.username,
            passwd=self.password,
            db=self.db,
        )
    except Exception as e:
        self.fail("Failed to create connection with error: " + str(e))
def testHashKeyGen(self):
    """Test Hash Key Generation"""
    try:
        connection = PySQLPool.getNewConnection(**self.connDict)
        # Expected key: md5 of all connection-info values concatenated
        # in iteration order, matching how the pool keys connections.
        hashStr = ''.join([str(x) for x in connection.info.values()])
        key = md5(hashStr).hexdigest()
        self.assertEqual(connection.key, key, msg="Hash Keys don't match")
    # Python-2-only `except Exception, e` replaced with the portable form.
    except Exception as e:
        self.fail("Failed to create connection with error: " + str(e))
def _connect_to_db(cls, db_name, db_user, db_passwd):
    """Open a pooled connection to *db_name*, decrypting the stored password.

    Raises a plain Exception describing the MySQL error code/message on failure.
    """
    try:
        connection = PySQLPool.getNewConnection(
            host=cls.__config.db_host,
            username=db_user,
            password=cls.__aescoder.decrypt(db_passwd),
            db=db_name,
            commitOnEnd=True,
            use_unicode=True,
            charset='utf8')
        # NOTE(review): `connection` is neither returned nor stored here —
        # the snippet may be truncated; verify against the full source.
    # Py2-only `except MySQLdb.Error, e` replaced with the portable form.
    except MySQLdb.Error as e:
        raise Exception("connection %d: %s" % (e.args[0], e.args[1]))
def testQuickDictConnectionCreation(self):
    """Quick Connection Creation using Kargs/Dict"""
    try:
        connDict = {
            "host": self.host,
            "user": self.username,
            "passwd": self.password,
            "db": self.db}
        connection = PySQLPool.getNewConnection(**connDict)
    # Python-2-only `except Exception, e` replaced with the portable form.
    except Exception as e:
        self.fail("Failed to create connection with error: " + str(e))
def testQuickConnectionCreation(self):
    """Quick Connection Creation"""
    try:
        connection = PySQLPool.getNewConnection(host=self.host, user=self.username,
                                                passwd=self.password, db=self.db)
        self.assertTrue(
            isinstance(connection, PySQLPool.connection.Connection))
    # Python-2-only `except Exception, e` replaced with the portable form.
    except Exception as e:
        self.fail("Failed to create connection with error: " + str(e))
def testQuickQueryCreation(self):
    """Quick Query Creation"""
    try:
        connDict = {
            "host": self.host,
            "user": self.username,
            "passwd": self.password,
            "db": self.db}
        connection = PySQLPool.getNewConnection(**connDict)
        query = PySQLPool.getNewQuery(connection)
    # Py2-only `except Exception, e` removed; the bound exception was never
    # used in the failure message, so no binding is needed.
    except Exception:
        self.fail('Failed to create PySQLQuery Object')
def testDBConnection(self):
    """Test actual connection to Database"""
    conn_args = {
        "host": self.host,
        "user": self.username,
        "passwd": self.password,
        "db": self.db,
    }
    connection = PySQLPool.getNewConnection(**conn_args)
    query = PySQLPool.getNewQuery(connection)
    query.Query("select current_user")
    # current_user is reported as 'user@host'; compare only the user part.
    result = str(query.record[0]['current_user']).split('@')[0]
    self.assertEqual(result, 'unittest', "DB Connection Failed")
def testQuickDictConnectionCreation(self):
    """Quick Connection Creation using Kargs/Dict"""
    try:
        conn_args = {"host": self.host,
                     "user": self.username,
                     "passwd": self.password,
                     "db": self.db}
        connection = PySQLPool.getNewConnection(**conn_args)
    except Exception as e:
        self.fail("Failed to create connection with error: " + str(e))
def TestConnect(sAddr, nPort, sUser, sPasswd):
    """Check connectivity to a MySQL server by selecting from mysql.user.

    Returns (True, '成功') on success, or (False, exception) on failure.
    """
    try:
        testConn = PySQLPool.getNewConnection(username=sUser, password=sPasswd,
                                              host=sAddr, port=nPort,
                                              db='mysql', charset='utf8')
        query = PySQLPool.getNewQuery(testConn)
        # NOTE(review): PySQLPool's documented method is Query(); confirm this
        # lowercase query() exists in the PySQLPool version in use.
        query.query(r'select * from user')
        return True, '成功'
    # Py2-only `except Exception, e` and `print e` replaced with forms that
    # run on both Python 2.6+ and 3.x without changing output.
    except Exception as e:
        print(e)
        return False, e
def testQuickQueryCreation(self):
    """Quick Query Creation"""
    try:
        conn_args = {"host": self.host,
                     "user": self.username,
                     "passwd": self.password,
                     "db": self.db}
        connection = PySQLPool.getNewConnection(**conn_args)
        query = PySQLPool.getNewQuery(connection)
    except Exception as e:
        self.fail('Failed to create PySQLQuery Object')
def testDBConnection(self):
    """Test actual connection to Database"""
    conn_args = {"host": self.host,
                 "user": self.username,
                 "passwd": self.password,
                 "db": self.db}
    connection = PySQLPool.getNewConnection(**conn_args)
    query = PySQLPool.getNewQuery(connection)
    query.Query("select current_user")
    # current_user is 'user@host'; only the user half is asserted.
    result = str(query.record[0]['current_user']).split('@')[0]
    self.assertEqual(result, 'unittest', "DB Connection Failed")
def __init__(self, dbServerName):
    """Open a pooled connection for the named database server.

    dbServerName selects which entry of config.g_databaseInfor to use.
    """
    # Alias the server's config entry once instead of repeating the
    # double-subscript lookup on every access (same dict object, so
    # mutations below still update the shared config).
    info = config.g_databaseInfor[dbServerName]
    if not info["watch_dog"]:
        # First run: the stored password is still encrypted — decrypt it
        # once and mark the entry so it is not decrypted again.
        info["password"] = self.DecryptPassword(info["password"])
        info["watch_dog"] = True
    self.pDBConn = PySQLPool.getNewConnection(
        username=info["user"],
        password=info["password"],
        host=info["addr"],
        port=info["port"],
        db=info["db_name"],
        charset='utf8')
def getQueryObject(**kwargs):
    """Get a new query object from the PySQLPool.

    @return a new query object, or None if an error has occurred
    """
    try:
        conn = PySQLPool.getNewConnection(host='localhost', username='******',
                                          password='', schema='test',
                                          port=3306, commitOnEnd=True)
        query = PySQLPool.getNewQuery(connection=conn)
        return query
    # something went wrong
    # (Py2-only `except Exception, e` replaced with the portable form.)
    except Exception as e:
        logging.error("Could not get query object: %s", e)
        return None
def __init__(self, dbServerName):
    """Open a pooled connection for the named database server.

    dbServerName selects which entry of config.g_databaseInfor to use.
    """
    # Alias the config entry once; `info` is the same dict object, so
    # assignments below still mutate the shared config in place.
    info = config.g_databaseInfor[dbServerName]
    if not info["watch_dog"]:
        # First run: password is stored encrypted — decrypt once and
        # set the watch_dog flag so this is not repeated.
        info["password"] = self.DecryptPassword(info["password"])
        info["watch_dog"] = True
    self.pDBConn = PySQLPool.getNewConnection(
        username=info["user"],
        password=info["password"],
        host=info["addr"],
        port=info["port"],
        db=info["db_name"],
        charset='utf8')
def getQueryObject(**kwargs):
    """Get a new query object from the PySQLPool.

    @return a new query object, or None if an error has occurred
    """
    try:
        conn = PySQLPool.getNewConnection(host='localhost', username='******',
                                          password='', schema='test',
                                          port=3306, commitOnEnd=True)
        query = PySQLPool.getNewQuery(connection=conn)
        return query
    # something went wrong
    # (Py2-only `except Exception, e` replaced with the portable form.)
    except Exception as e:
        logging.error("Could not get query object: %s", e)
        return None
def testAltConnectionOptions(self):
    """Test Creating a connection with alternate arguments"""
    try:
        conArgs = {
            "username": "******",
            "password": "******",
            "schema": "tDB",
        }
        connection = PySQLPool.getNewConnection(**conArgs)
        # The alternate kwargs must map onto the canonical MySQLdb names.
        self.assertEqual(connection.info['user'], conArgs['username'],
                         msg="Usernames don't match")
        self.assertEqual(connection.info['passwd'], conArgs['password'],
                         msg="Passwords don't match")
        self.assertEqual(connection.info['db'], conArgs['schema'],
                         msg="DBs don't match")
    # Python-2-only `except Exception, e` replaced with the portable form.
    except Exception as e:
        self.fail("Failed to create connection with error: " + str(e))
def testPossitionBasedConnectionArgs(self):
    """Test Creating a connection with position based arguments"""
    try:
        # Positional order expected by getNewConnection:
        # host, user, passwd, db, port.
        conArgs = ["tHost", "tUser", "tPass", "tDB", 3306]
        connection = PySQLPool.getNewConnection(*conArgs)
        self.assertEqual(connection.info['host'], conArgs[0],
                         msg="Hosts don't match")
        self.assertEqual(connection.info['user'], conArgs[1],
                         msg="Usernames don't match")
        self.assertEqual(connection.info['passwd'], conArgs[2],
                         msg="Passwords don't match")
        self.assertEqual(connection.info['db'], conArgs[3],
                         msg="DBs don't match")
        self.assertEqual(connection.info['port'], conArgs[4],
                         msg="Ports don't match")
    # Python-2-only `except Exception, e` replaced with the portable form.
    except Exception as e:
        self.fail("Failed to create connection with error: " + str(e))
myfile=scriptfolder+'/../config/'+settingsFile myconfig = ConfigParser.ConfigParser() myconfig.read(myfile) threads=ConfigSectionMap(myconfig, 'Threadmanagement') mysql=ConfigSectionMap(myconfig, 'MySQL') # paths=ConfigSectionMap(myconfig, 'paths') # threads['timestamp'] THREAD_LIMIT = int(stripQuotes(threads['threadlimit'])) m_intervall = float(stripQuotes(threads['monitor_intervall'])) tm_intervall = float(stripQuotes(threads['threadmanager_intervall'])) filler_intervall = float(stripQuotes(threads['queuefiller_intervall'])) tm_threshold = int(stripQuotes(threads['queuelength_threshold'])) db = PySQLPool.getNewConnection(username=stripQuotes(mysql['db_username']), password=stripQuotes(mysql['db_password']), host=stripQuotes(mysql['db_host']), db=stripQuotes(mysql['db_name'])) http_pool = HTTPSConnectionPool("https://sessionserver.mojang.com/session/minecraft/profile", port=443, maxsize=1000000, timeout=urllib3.Timeout(connect=2, read=3)) myexit=exittest() print "starting ...",timestamp() print str(threads) paths="dead Variable" filler = filler.filler(queue=workerqueue,db=db, intervall=filler_intervall, name='filler') tm = threadmanager.threadmanager(queue=workerqueue,db=db,paths=paths,http_pool=http_pool, threshold=tm_threshold, intervall=tm_intervall, name='threadmanager',limit=THREAD_LIMIT) time.sleep(0.2) print "init Queue Filler" filler.start() print "starting threadmanager" tm.start() print "starting monitors"
import MySQLdb
import PySQLPool

connection = PySQLPool.getNewConnection(username='******', password='******',
                                        host='localhost', db='recommendation',
                                        charset='utf8')


def query(sql):
    """Run *sql* through the pool and return the result rows."""
    q = PySQLPool.getNewQuery(connection, True)
    q.Query(sql)
    return q.record


def execute(sql):
    """Run *sql* through the pool and return the affected row count."""
    q = PySQLPool.getNewQuery(connection, True)
    q.Query(sql)
    return q.affectedRows


def insert(sql):
    """Run *sql* through the pool and return the last AUTO_INCREMENT id."""
    q = PySQLPool.getNewQuery(connection, True)
    q.Query(sql)
    return q.lastInsertID
import MySQLdb
import PySQLPool

connection = PySQLPool.getNewConnection(username='******', password='******',
                                        host='localhost', db='recommendation',
                                        charset='utf8')


def query(sql):
    """Execute *sql* and return the fetched rows."""
    q = PySQLPool.getNewQuery(connection, True)
    q.Query(sql)
    return q.record


def execute(sql):
    """Execute *sql* and return how many rows were affected."""
    q = PySQLPool.getNewQuery(connection, True)
    q.Query(sql)
    return q.affectedRows


def insert(sql):
    """Execute *sql* and return the id generated by the insert."""
    q = PySQLPool.getNewQuery(connection, True)
    q.Query(sql)
    return q.lastInsertID
# NOTE(review): Flask petrol-price scraper snippet, collapsed onto one line and
# truncated inside get_prices_from_iocl_website() — preserved verbatim rather
# than reconstructed, since the missing tail cannot be inferred from here.
from flask import Flask import PySQLPool import logging import datetime import datefinder import requests import pytz import json from bs4 import BeautifulSoup petrol_url = "https://www.iocl.com/Products/Gasoline.aspx" diesel_url = "https://www.iocl.com/Products/HighspeedDiesel.aspx" PySQLPool.getNewPool().maxActiveConnections = 5 connection = PySQLPool.getNewConnection(username='******', password='******', host='localhost', db='petrol_prices') app = Flask(__name__) def ist_today(): now = datetime.datetime.now(pytz.timezone('Asia/Kolkata')) today = datetime.datetime(now.year, now.month, now.day) return today def get_prices_from_iocl_website(): logging.error("fetching from iocl petrol website") petrol_page = requests.get(petrol_url) petrol_soup = BeautifulSoup(petrol_page.content, "html.parser") petrol_tables = petrol_soup.find_all('table', class_="product-table")
def initMySQL(self):
    """Create the shared pooled MySQL connection and publish it on the main bus."""
    cfg = self.mainbus["runtimevar"]["config"]["mysql"]
    self.mainbus["functions"]["mysql_con"] = PySQLPool.getNewConnection(
        username=cfg["username"],
        password=cfg["password"],
        host=self.stripQuotes(cfg["host"]),
        db=self.stripQuotes(cfg["name"]))
# NOTE(review): tushare cash-flow loader, collapsed onto one line and truncated
# at BOTH ends (starts with a stray `return data`, ends mid-argument-list) —
# preserved verbatim. Flagged: the INSERT is built with % string formatting;
# if inputs were ever untrusted this would be an injection risk.
with open(file) as json_file: data = json.load(json_file) return data def format_data(row): for idx in range(len(row)): if (isinstance(row[idx], float) and math.isnan(row[idx])): row[idx] = 0 return row conf = get_conf('conf.ini') conn = PySQLPool.getNewConnection(host=conf['host'], port=conf['port'], user=conf['user'], passwd=conf['passwd'], db=conf['db'], charset='utf8') query = PySQLPool.getNewQuery(conn) years = [2016] for i in years: for j in range(4): timestamp = "%d0%d" % (i, j + 1) profits = ts.get_cashflow_data(i, j + 1) for index, row in profits.head(1).iterrows(): row = format_data(row) sql = "insert into t_cashflow_data(code, cf_sales, rateofreturn, cf_nm, cf_liabilities, cashflowratio, timestamp) values ('%s', %f, %f, %f, %f, %f, '%s') on duplicate key update cf_sales=%f, rateofreturn=%f, cf_nm=%f, cf_liabilities=%f, cashflowratio=%f" % ( row.code, row.cf_sales, row.rateofreturn, row.cf_nm, row.cf_liabilities, row.cashflowratio, timestamp, row.cf_sales, row.rateofreturn, row.cf_nm, row.cf_liabilities,
import json
import PySQLPool
from pyramid.view import view_config
import shiftScheduler

# @view_config(route_name='home', renderer='templates/mytemplate.pt')
# def my_view(request):
#     return {'project': 'ShiftScheduler'}

dbpool = PySQLPool.getNewConnection(host='localhost', port=3306, user='******',
                                    passwd='123456#A', db='scheduler')
ss = shiftScheduler.shiftScheduler(dbpool)


@view_config(route_name='login', renderer='templates/login.pt')
def login(request):
    """Render the login page (no template data needed)."""
    return {}


@view_config(route_name='logme', renderer='json')
def logme(request):
    """Handle a GET login attempt and return the scheduler's JSON result."""
    if request.method == "GET":
        r = ss.login(request)
        return r
# -*- coding:utf8 -*-
import MySQLdb
import PySQLPool
import tushare as ts
import sys, math

# Python-2 idiom: force utf-8 as the default string encoding.
reload(sys)
sys.setdefaultencoding('utf-8')

conn = PySQLPool.getNewConnection(host='10.211.55.5', port=3306, user='******',
                                  passwd='analysis@sec', db='db_sina_data',
                                  charset='utf8')
query = PySQLPool.getNewQuery(conn)

for i in [2016, 2017]:
    for j in range(4):
        timestamp = "%d0%d" % (i, j + 1)
        profits = ts.get_growth_data(i, j + 1)
        for index, row in profits.head(1).iterrows():
            # Replace NaN floats with 0 so the %f formatting stays valid.
            for idx in range(len(row)):
                if (isinstance(row[idx], float) and math.isnan(row[idx])):
                    row[idx] = 0
            sql = "insert into t_growth_data(code, timestamp, mbrg, nprg, nav, targ, epsg, seg) values ('%s', '%s', %f, %f, %f, %f, %f, %f) on duplicate key update mbrg=%f, nprg=%f, nav=%f, targ=%f, epsg=%f, seg=%f" % (
                row.code, timestamp, row.mbrg, row.nprg, row.nav, row.targ,
                row.epsg, row.seg, row.mbrg, row.nprg, row.nav, row.targ,
                row.epsg, row.seg)
            query.Query(sql)
def __init__(self):
    """Open the pooled 'vospace' DB connection from the configured credentials."""
    config = CONFIG.dbinfo().copy()
    # self.db = mysql.connector.Connect(**config)
    self.db = PySQLPool.getNewConnection(host=config['host'],
                                         user=config['user'],
                                         password=config['password'],
                                         schema='vospace')
import PySQLPool #bz_config = { # 'host': 'bz3-db3.eng.vmware.com', # 'port': 3306, # 'user': '******', # 'passwd': 'mts', # 'db': 'bugzilla' #} unifiedCon = PySQLPool.getNewConnection(username='******', password='******', host='unified-stage.eng.vmware.com', db='unified') bugzillaCon = PySQLPool.getNewConnection(username='******', password='******', host='bz3-db3.eng.vmware.com', db='bugzilla') def getQueryUnified(): return PySQLPool.getNewQuery(unifiedCon, commitOnEnd = True) def getQueryBugzilla(): return PySQLPool.getNewQuery(bugzillaCon) if __name__ == '__main__': c1 = getQueryUnified() print c1 c2 = getQueryBugzilla() print c2
import PySQLPool #bz_config = { # 'host': 'bz3-db3.eng.vmware.com', # 'port': 3306, # 'user': '******', # 'passwd': 'mts', # 'db': 'bugzilla' #} unifiedCon = PySQLPool.getNewConnection(username='******', password='******', host='unified-stage.eng.vmware.com', db='unified') bugzillaCon = PySQLPool.getNewConnection(username='******', password='******', host='bz3-db3.eng.vmware.com', db='bugzilla') def getQueryUnified(): return PySQLPool.getNewQuery(unifiedCon, commitOnEnd=True) def getQueryBugzilla(): return PySQLPool.getNewQuery(bugzillaCon) if __name__ == '__main__': c1 = getQueryUnified() print c1
# NOTE(review): wePredictAPI DB module collapsed onto one line and truncated
# inside class DB (getResultParamaters has only its docstring here; the full
# body appears elsewhere in this file) — preserved verbatim.
__author__ = 'robertfletcher' ## @package wePredictAPI.database.db # API Classes that are specific to Database connections from flask import g from wePredictAPI.settings import * import PySQLPool ## Defines the Global Connection Pool of Database connections connection = PySQLPool.getNewConnection(username=username, password=password, host=hostname, db=database) class DB(object): """ Contains Functions to get results from the database """ def __init__(self): """ """ def getResultParamaters(self, sql_query, values): """ Get result form database when SQL query and values statement :param sql_query: SQL Query :param values: Vales to be inserted into the query before being run :return data: Object Array of the results """
#!/usr/bin/python # -*- coding: utf-8 -*- import PySQLPool if __name__ == "__main__": connection = PySQLPool.getNewConnection(username='******', password='******', host='localhost', db='sandyfiles') query = PySQLPool.getNewQuery(connection) query.QueryOne('SELECT VERSION()') print query.record
# NOTE(review): Volume-module header collapsed onto one line and truncated
# inside get_property_type() — preserved verbatim. Also note the two
# connections pass `passwd=` where sibling snippets use `password=`; confirm
# which keyword this PySQLPool version accepts.
# __author__ = 'Bill' import traceback import datetime import string from functools import partial import PySQLPool from config import config_mysql from utils import utils import model_redis connection = PySQLPool.getNewConnection(username='******', passwd='123456', host='xx.xx.xx.xx', db='busi_rela', charset='utf8') his_conn = PySQLPool.getNewConnection(username='******', passwd='123456', host='xx.xx.xx.xx', db='python', charset='utf8') # print dir(connection) """ Volumn模块 """ DATA_TYPE = ['mem', 'cpu', 'swap', 'network', 'ipcs'] def get_property_type(property_name): property_type = '' for item in DATA_TYPE: if property_name.find(item) != -1:
def __init__(self):
    """Connect using the default Django DATABASES entry and disable safe updates."""
    default_db = DATABASES['default']
    self.con = PySQLPool.getNewConnection(username=default_db['USER'],
                                          password=default_db['PASSWORD'],
                                          host=default_db['HOST'],
                                          db=default_db['NAME'])
    self.query = PySQLPool.getNewQuery(self.con)
    # Allow UPDATE/DELETE without a key in the WHERE clause.
    self.query.Query('SET SQL_SAFE_UPDATES=0')
# NOTE(review): Eve-Refine-Finder helper collapsed onto one line and truncated
# INSIDE the triple-quoted queryText SQL string — preserved verbatim; the SQL
# cannot be reformatted without the missing tail.
#!/usr/bin/env python import PySQLPool from config import config # Supporting functions/one time user functions for the Eve-Refine-Finder db = PySQLPool.getNewConnection(user=config['username'], passwd=config['password'], db=config['db'], commitOnEnd=True) queryText = """SELECT t1.typeID, (t1.volume * t1.portionSize) as Volume, t1.portionSize, SUM(CASE WHEN m1.materialTypeID = 34 THEN m1.quantity ELSE 0 END) AS Tritanium, SUM(CASE WHEN m1.materialTypeID = 35 THEN m1.quantity ELSE 0 END) AS Pyerite, SUM(CASE WHEN m1.materialTypeID = 36 THEN m1.quantity ELSE 0 END) AS Mexallon, SUM(CASE WHEN m1.materialTypeID = 37 THEN m1.quantity ELSE 0 END) AS Isogen, SUM(CASE WHEN m1.materialTypeID = 38 THEN m1.quantity ELSE 0 END) AS Nocxium, SUM(CASE WHEN m1.materialTypeID = 39 THEN m1.quantity ELSE 0 END) AS Zydrine, SUM(CASE WHEN m1.materialTypeID = 40 THEN m1.quantity ELSE 0 END) AS Megacyte, SUM(CASE WHEN m1.materialTypeID = 11399 THEN m1.quantity ELSE 0 END) AS Morphite FROM eve.invTypes t1 INNER JOIN eve.dgmTypeAttributes t2 ON t1.typeID = t2.typeID AND t2.attributeID = 633 AND t1.published = 1 AND t2.valueInt IN (0, 1, 2, 3, 4) -- metaLevel INNER JOIN eve.invGroups t3 ON t1.groupID = t3.groupID INNER JOIN eve.invTypeMaterials m1 ON t1.typeID = m1.typeID GROUP BY t1.typeID, t1.typeName, coalesce(t2.valueFloat,t2.valueInt), t1.groupID,
# NOTE(review): txAMQP/twisted consumer coroutine split across two collapsed
# source lines; logic (yield-driven channel setup, infinite consume loop) is
# too order-sensitive to restyle safely — preserved verbatim with review notes.
# Flagged: both SQL statements below are built with % string formatting from
# message-derived fields (props['message-id'], pdu.params) — SQL injection
# risk; prefer parameterized queries if this PySQLPool/MySQLdb path supports them.
def gotConnection(conn, username, password): #print "Connected to broker." yield conn.authenticate(username, password) print "Authenticated. Ready to receive messages" chan = yield conn.channel(1) yield chan.channel_open() yield chan.queue_declare(queue="someQueueName") # Bind to submit.sm.* and submit.sm.resp.* routes yield chan.queue_bind(queue="someQueueName", exchange="messaging", routing_key='submit.sm.*') yield chan.queue_bind(queue="someQueueName", exchange="messaging", routing_key='submit.sm.resp.*') yield chan.basic_consume(queue='someQueueName', no_ack=True, consumer_tag="someTag") queue = yield conn.queue("someTag") #Build Mysql connection pool PySQLPool.getNewPool( ).maxActiveConnections = 20 #Set how many reusable conns to buffer in the pool print "Pooling 20 connections" #Connection parameters - Fill this info with your MySQL server connection parameters mysqlconn = PySQLPool.getNewConnection(username='******', password='******', host='server_host', db='database_name') print "Connected to MySQL" queryp = PySQLPool.getNewQuery(mysqlconn) # Wait for messages # This can be done through a callback ... while True: msg = yield queue.get() props = msg.content.properties pdu = pickle.loads(msg.content.body) if msg.routing_key[:15] == 'submit.sm.resp.': #print 'SubmitSMResp: status: %s, msgid: %s' % (pdu.status, # props['message-id']) #Update a record in mysql according to your own table. This will fire upon receiving a PDU response. #Make sure you already have a matching sms record to update. 
# NOTE(review): continuation of the consume loop above. commitPool() after each
# query is required per the original author's comment that autocommit does not
# work reliably here. Also flagged: pickle.loads() on broker message bodies
# deserializes untrusted data — arbitrary-code-execution risk if the broker
# is not fully trusted.
queryp.Query( "UPDATE table_name SET status='%s' WHERE messageid='%s'" % (pdu.status, props['message-id'])) PySQLPool.commitPool( ) #Very important, always execute a commit, autocommit doesn´t work well here elif msg.routing_key[:10] == 'submit.sm.': #print 'SubmitSM: from %s to %s, content: %s, msgid: %s' % (pdu.params['source_addr'], # pdu.params['destination_addr'], # pdu.params['short_message'], # props['message-id']) # This will fire every time a message is sent to the SumbitSM queue. # Create a record with the messagesent msg queryp.Query( "INSERT INTO table_name (messageid,carrier,date,dst,src,status,accountcode,cost,sale,plan_name,amaflags,content) VALUES ('%s','Carrier',NOW(),'%s','%s','8','00000','0.0','0.0','plan_name','some_status','%s') " % (props['message-id'], pdu.params['destination_addr'], pdu.params['source_addr'], pdu.params['short_message'])) """ The previous query works for the following table structure: id INT primary_key auto_increment messageid VARCHAR(128) carrier VARCHAR date DATETIME dst VARCHAR(15) src VARCHAR(15) status VARCHAR(10) accountcode INT cost FLOAT sale FLOAT plan_name VARCHAR(25) amaflags VARCHAR(10) content VARCHAR(160) """ PySQLPool.commitPool() # Remember to Commit else: print 'unknown route' # A clean way to tear down and stop yield chan.basic_cancel("someTag") yield chan.channel_close() chan0 = yield conn.channel(0) yield chan0.connection_close() reactor.stop()
# NOTE(review): bottle/memcached price-service header collapsed onto one line
# and truncated mid-expression inside repoThread() — preserved verbatim.
#!/usr/bin/env python from bottle import route, run, request, abort import PySQLPool from xml.dom.minidom import Document from config import config import locale import pylibmc import threading import sys import time PySQLPool.getNewPool().maxActiveConnections = 100 PySQLPool.getNewPool().maxActivePerConnection = 1 mc = pylibmc.Client(["127.0.0.1"], binary=True, behaviors={"tcp_nodelay": True, "ketama": True}) pool = pylibmc.ClientPool() db = PySQLPool.getNewConnection(user=config['username'],passwd=config['password'],db=config['db']) locale.setlocale(locale.LC_ALL, 'en_US') maxThreads = 60 pool.fill(mc, maxThreads + 10) repoList = [] repoVal = {} def repoThread(): global repoList global repoVal while len(repoList) > 0: row = repoList.pop() regions = regionList() prices = getMineralBasket() refValue = ((row['Tritanium'] * prices['Tritanium']['sellavg']) + (row['Pyerite'] * prices['Pyerite']['sellavg']) +
# NOTE(review): EMDR market-data consumer (hotqueue + gevent) collapsed onto
# one line and truncated inside process() — preserved verbatim.
from hotqueue import HotQueue import PySQLPool as pysqlpool import gevent from gevent.pool import Pool from gevent import monkey; gevent.monkey.patch_all() import emdr_config as config # Max number of greenlet workers MAX_NUM_POOL_WORKERS = 15 # use a greenlet pool to cap the number of workers at a reasonable level gpool = Pool(size=MAX_NUM_POOL_WORKERS) queue = HotQueue("emdr", unix_socket_path="/var/run/redis/redis.sock") queue_history = HotQueue("emdr_history", unix_socket_path="/var/run/redis/redis.sock") connection = pysqlpool.getNewConnection(username=config.dbuser, password=config.dbpass, unix_socket=config.dbsocket, db=config.dbname) # connection = pysqlpool.getNewConnection(username=config.dbuser, password=config.dbpass, host=config.dbhost , db=config.dbname) def main(): for message in queue.consume(): gpool.spawn(process, message) def process(message): query = pysqlpool.getNewQuery(connection) market_json = zlib.decompress(message) market_data = unified.parse_from_json(market_json) insertData = [] deleteData = [] if market_data.list_type == 'orders':
# NOTE(review): zmq/gevent relay subscriber collapsed onto one line and
# truncated inside main() — preserved verbatim.
from gevent import monkey; gevent.monkey.patch_all() import zmq import scipy.stats as stats import numpy.ma as ma import numpy as np import PySQLPool from config import config from datetime import datetime import time import dateutil.parser np.seterr(all='ignore') PySQLPool.getNewPool().maxActiveConnections = 50 dbConn = PySQLPool.getNewConnection(user=config['username'],passwd=config['password'],db=config['db'], commitOnEnd=True) # The maximum number of greenlet workers in the greenlet pool. This is not one # per processor, a decent machine can support hundreds or thousands of greenlets. # I recommend setting this to the maximum number of connections your database # backend can accept, if you must open one connection per save op. MAX_NUM_POOL_WORKERS = 300 def main(): """ The main flow of the application. """ context = zmq.Context() subscriber = context.socket(zmq.SUB) # Connect to the first publicly available relay.
def get_connection(fn):
    """Load DB settings from the JSON file *fn* and return a pooled connection."""
    with open(fn) as config_file:
        settings = json.load(config_file)
    return PySQLPool.getNewConnection(**settings)
# NOTE(review): news-site crawler DB module collapsed onto one line and
# truncated at the start of insert_site() — preserved verbatim.
#!/usr/bin/env python #coding=utf-8 import config import cityhash import PySQLPool import tldextracter connection = PySQLPool.getNewConnection( username=config.DB_USER, password=config.DB_PASSWORD, host=config.DB_HOST, db=config.DB_DB, charset=config.DB_CHARSET) def init(): ''' CREATE TABLE `news_sites` ( `id` INT UNSIGNED PRIMARY KEY NOT NULL AUTO_INCREMENT, `domainhash` BIGINT UNSIGNED NOT NULL DEFAULT 0, `language` TINYINT UNSIGNED DEFAULT 1 NOT NULL, `name` VARCHAR(128) NOT NULL DEFAULT '', `domain` VARCHAR(100) NOT NULL DEFAULT '', `url` VARCHAR(512) NOT NULL DEFAULT '' ) Engine=INNoDB DEFAULT CHARSET=utf8; ''' pass def insert_site(name, language, url):
def getDB():
    """Return a pooled connection built from the CherryPy app's [mysql] config."""
    mysql_settings = cherrypy.tree.apps[''].config["mysql"]
    return PySQLPool.getNewConnection(**mysql_settings)
__author__ = 'robertfletcher'

## @package wePredictAPI.database.db
# API Classes that are specific to Database connections

from flask import g
from wePredictAPI.settings import *
import PySQLPool

## Defines the Global Connection Pool of Database connections
connection = PySQLPool.getNewConnection(username=username, password=password,
                                        host=hostname, db=database)


class DB(object):
    """Contains Functions to get results from the database"""

    def __init__(self):
        """ """

    def getResultParamaters(self, sql_query, values):
        """Get result from the database for a SQL query with bound values.

        :param sql_query: SQL Query
        :param values: Values to be inserted into the query before being run
        :return data: Object Array of the results
        """
        query = PySQLPool.getNewQuery(connection)
        query.Query(sql_query, values)
        data = query.record
        return data
# NOTE(review): chunk begins mid-dict-literal (SQL template strings) —
# preserved verbatim. Flagged: getNewConnection is called positionally as
# (db_user, db_pass, db_host, db_name), but the positional order exercised
# elsewhere in this file is (host, user, passwd, db) — verify the argument
# order against the PySQLPool API before relying on this connection.
"(SELECT opt_list.id FROM optin_list AS opt_list " + \ "WHERE opt_list.abreviation = %s)", 'query_desabo' : "SELECT desabo.id FROM optin_desabo AS desabo " + \ "WHERE desabo.mail_id = %s AND desabo.optin_id IN " + \ "(SELECT opt_list.id FROM optin_list AS opt_list " + \ "WHERE opt_list.abreviation = %s)", 'query_info' : "SELECT b.mail, id.civilite, id.prenom, id.nom FROM base AS b " + \ "INNER JOIN id_unik AS id ON b.id = id.mail_id " + \ "WHERE id.mail_id = %s", } import PySQLPool PySQLPool.getNewPool().maxActiveConnections = 50 my_con = PySQLPool.getNewConnection(db_user, db_pass, db_host, db_name) global query_pool query_pool = PySQLPool.getNewQuery(my_con) from mysql.connector.pooling import MySQLConnectionPool from mysql.connector import connect global pool pool = MySQLConnectionPool(pool_name=None, pool_size=50, pool_reset_session=True, user=db_user, password=db_pass, database=db_name, host=db_host)