Example #1
def set_conninfo(conn_info, max_pool_count=3):
    conn = PySQLPool.getNewConnection(host=conn_info["hostname"],
                                      username=conn_info["username"],
                                      password=conn_info["password"],
                                      schema=conn_info["schema"])
    PySQLPool.getNewPool().maxActiveConnections = max_pool_count

    return conn
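# A minimal usage sketch (an assumption, not part of the original example: it
# presumes "import PySQLPool" at module level and uses placeholder credentials):
conn_info = {"hostname": "localhost", "username": "db_user",
             "password": "db_pass", "schema": "market"}
connection = set_conninfo(conn_info, max_pool_count=5)
query = PySQLPool.getNewQuery(connection)  # reuse the pooled connection for queries
query.Query("SELECT 1")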
Example #2
def process(market_data):
  query = PySQLPool.getNewQuery(connection)  # 'connection' is a module-level PySQLPool connection (not shown in this snippet)

  insertData = []
  for history in market_data.get_all_entries_ungrouped():
    insertData.append((history.type_id, history.region_id, history.historical_date, history.low_price, history.high_price, history.average_price, history.total_quantity, history.num_orders, history.generated_at))

  sql  = 'INSERT INTO `items_history` (`type_id`, `region_id`, `date`, `price_low`, `price_high`, `price_average`, '
  sql += '`quantity`, `num_orders`, `created`) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s) '
  sql += 'ON DUPLICATE KEY UPDATE '
  sql += '`price_low`=VALUES(`price_low`), `price_high`=VALUES(`price_high`), `price_average`=VALUES(`price_average`), '
  sql += '`quantity`=VALUES(`quantity`), `num_orders`=VALUES(`num_orders`)'
  query.executeMany(sql, insertData)

  gevent.sleep()
  PySQLPool.getNewPool().Commit()
  sys.stdout.write(".")
  sys.stdout.flush()
Example #3
def set_conninfo(conn_info, max_pool_count=3):
  conn = PySQLPool.getNewConnection(
           host     = conn_info["hostname"],
           username = conn_info["username"],
           password = conn_info["password"],
           schema   = conn_info["schema"]
         )
  PySQLPool.getNewPool().maxActiveConnections = max_pool_count

  return conn
Example #4
import gevent
from gevent.pool import Pool
from gevent import monkey; monkey.patch_all()
import zmq
import scipy.stats as stats
import numpy.ma as ma
import numpy as np
import PySQLPool
from config import config
from datetime import datetime
import time
import dateutil.parser

np.seterr(all='ignore')

PySQLPool.getNewPool().maxActiveConnections = 50

dbConn = PySQLPool.getNewConnection(user=config['username'],
                                    passwd=config['password'],
                                    db=config['db'],
                                    commitOnEnd=True)

# The maximum number of greenlet workers in the greenlet pool. This is not one
# per processor; a decent machine can support hundreds or thousands of greenlets.
# I recommend setting this to the maximum number of connections your database
# backend can accept, if you must open one connection per save op.
MAX_NUM_POOL_WORKERS = 300
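
# A minimal sketch (an assumption, not shown in this truncated snippet) of how the
# bounded greenlet pool is typically driven; "handler" stands in for the real
# message-processing callable.
def run_pool(handler, messages):
    worker_pool = Pool(MAX_NUM_POOL_WORKERS)   # caps concurrent workers / DB connections
    for message in messages:
        worker_pool.spawn(handler, message)    # each message handled in its own greenlet
    worker_pool.join()                         # wait for all greenlets to finish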

def main():
    """
    The main flow of the application.
    """
    context = zmq.Context()
    subscriber = context.socket(zmq.SUB)
Example #5
#!/usr/bin/env python
from bottle import route, run, request, abort
import PySQLPool
from xml.dom.minidom import Document
from config import config
import locale
import pylibmc
import threading
import sys
import time

PySQLPool.getNewPool().maxActiveConnections = 100
PySQLPool.getNewPool().maxActivePerConnection = 1
mc = pylibmc.Client(["127.0.0.1"],
                    binary=True,
                    behaviors={
                        "tcp_nodelay": True,
                        "ketama": True
                    })
pool = pylibmc.ClientPool()
db = PySQLPool.getNewConnection(user=config['username'],
                                passwd=config['password'],
                                db=config['db'])
locale.setlocale(locale.LC_ALL, 'en_US')
maxThreads = 60
pool.fill(mc, maxThreads + 10)
repoList = []
repoVal = {}
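
# A minimal sketch (an assumption, not shown in this snippet) of how a worker
# thread would borrow a memcached client from the ClientPool filled above:
def cache_lookup(key):
    with pool.reserve() as client:   # borrow a pooled client, returning it when done
        return client.get(key)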


def repoThread():
Example #6
def process(message):
  query = PySQLPool.getNewQuery(connection)
  
  market_json = zlib.decompress(message)
  market_data = unified.parse_from_json(market_json)
  insertData = []
  deleteData = []
  
  if market_data.list_type == 'orders':
    orderIDs = []
    typeIDs = []
    if len(market_data) > 0:
      stuff = {}
      for region in market_data.get_all_order_groups():
        for order in region:
          # Timezone is silently discarded since it doesn't seem to be used in any messages I've seen
          insertData.append((order.order_id, str(order.generated_at).split("+", 1)[0], str(order.order_issue_date).split("+", 1)[0], order.type_id, round(order.price, 2), order.volume_entered, order.volume_remaining, order.order_range, order.order_duration, order.minimum_volume, int(order.is_bid), order.station_id, order.solar_system_id, order.region_id))
          orderIDs.append(str(int(order.order_id)))  # hacky SQLi protection
          typeIDs.append(str(int(order.type_id)))
        deleteData.append((region.region_id,))
        sql = "DELETE FROM `marketOrdersMem` WHERE `regionID` = %s AND `typeID` IN (" + ", ".join(list(set(typeIDs))) + ") AND `orderID` NOT IN (" + ", ".join(orderIDs) + ")"
        query.executeMany(sql, deleteData)

    # This query uses INSERT ... ON DUPLICATE KEY UPDATE syntax. Each column is only
    # updated when the new row's generationDate is newer than the stored one, so we
    # never replace our data with older data. REPLACE can't express that condition,
    # and querying the table for existing rows first would likely slow the inserts.
    sql  = 'INSERT INTO `marketOrdersMem` (`orderID`, `generationDate`, `issueDate`, `typeID`, `price`, `volEntered`, '
    sql += '`volRemaining`, `range`, `duration`, `minVolume`, `bid`, `stationID`, `solarSystemID`, `regionID`) '
    sql += 'VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s) '
    sql += 'ON DUPLICATE KEY UPDATE '
    sql += '`issueDate`=IF(`generationDate` < VALUES(`generationDate`), VALUES(`issueDate`), `issueDate`), '
    sql += '`typeID`=IF(`generationDate` < VALUES(`generationDate`), VALUES(`typeID`), `typeID`), '
    sql += '`price`=IF(`generationDate` < VALUES(`generationDate`), VALUES(`price`), `price`), '
    sql += '`volEntered`=IF(`generationDate` < VALUES(`generationDate`), VALUES(`volEntered`), `volEntered`), '
    sql += '`volRemaining`=IF(`generationDate` < VALUES(`generationDate`), VALUES(`volRemaining`), `volRemaining`), '
    sql += '`range`=IF(`generationDate` < VALUES(`generationDate`), VALUES(`range`), `range`), '
    sql += '`duration`=IF(`generationDate` < VALUES(`generationDate`), VALUES(`duration`), `duration`), '
    sql += '`minVolume`=IF(`generationDate` < VALUES(`generationDate`), VALUES(`minVolume`), `minVolume`), '
    sql += '`bid`=IF(`generationDate` < VALUES(`generationDate`), VALUES(`bid`), `bid`), '
    sql += '`stationID`=IF(`generationDate` < VALUES(`generationDate`), VALUES(`stationID`), `stationID`), '
    sql += '`solarSystemID`=IF(`generationDate` < VALUES(`generationDate`), VALUES(`solarSystemID`), `solarSystemID`), '
    sql += '`regionID`=IF(`generationDate` < VALUES(`generationDate`), VALUES(`regionID`), `regionID`), '
    sql += '`generationDate`=IF(`generationDate` < VALUES(`generationDate`), VALUES(`generationDate`), `generationDate`)'
    query.executeMany(sql, insertData)
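    # A condensed illustration of the pattern above (hypothetical reduced table,
    # not part of the original code): each column only takes the new value when the
    # incoming generationDate is newer, and generationDate itself is assigned last,
    # mirroring the full statement, so the earlier comparisons still see the stored value.
    #
    #   INSERT INTO t (orderID, generationDate, price) VALUES (%s, %s, %s)
    #   ON DUPLICATE KEY UPDATE
    #     `price` = IF(`generationDate` < VALUES(`generationDate`), VALUES(`price`), `price`),
    #     `generationDate` = IF(`generationDate` < VALUES(`generationDate`), VALUES(`generationDate`), `generationDate`)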
    # print("Finished a job of %d market orders" % len(market_data))
  
  elif market_data.list_type == 'history':
    queue_history.put(market_data)
    #pass
    # insertData = []
    # for history in market_data.get_all_entries_ungrouped():
    #   insertData.append((history.type_id, history.region_id, history.historical_date, history.low_price, history.high_price, history.average_price, history.total_quantity, history.num_orders, history.generated_at))

    # sql  = 'INSERT INTO `items_history` (`type_id`, `region_id`, `date`, `price_low`, `price_high`, `price_average`, '
    # sql += '`quantity`, `num_orders`, `created`) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s) '
    # sql += 'ON DUPLICATE KEY UPDATE '
    # sql += '`price_low`=VALUES(`price_low`), `price_high`=VALUES(`price_high`), `price_average`=VALUES(`price_average`), '
    # sql += '`quantity`=VALUES(`quantity`), `num_orders`=VALUES(`num_orders`)'
    # query.executeMany(sql, insertData)

  gevent.sleep()
  PySQLPool.getNewPool().Commit()
  sys.stdout.write(".")
  sys.stdout.flush()
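# A minimal sketch (an assumption, not shown in this snippet) of the receive loop
# that would feed this handler, following the zmq subscriber / greenlet-pool setup
# seen in Example #4; "subscriber" and "worker_pool" are placeholders here.
def receive_loop(subscriber, worker_pool):
    while True:
        message = subscriber.recv()           # raw zlib-compressed market message
        worker_pool.spawn(process, message)   # hand each message to its own greenlet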
Example #7
def gotConnection(conn, username, password):
    #print "Connected to broker."
    yield conn.authenticate(username, password)

    print "Authenticated. Ready to receive messages"
    chan = yield conn.channel(1)
    yield chan.channel_open()

    yield chan.queue_declare(queue="someQueueName")

    # Bind to submit.sm.* and submit.sm.resp.* routes
    yield chan.queue_bind(queue="someQueueName",
                          exchange="messaging",
                          routing_key='submit.sm.*')
    yield chan.queue_bind(queue="someQueueName",
                          exchange="messaging",
                          routing_key='submit.sm.resp.*')

    yield chan.basic_consume(queue='someQueueName',
                             no_ack=True,
                             consumer_tag="someTag")
    queue = yield conn.queue("someTag")

    #Build Mysql connection pool
    PySQLPool.getNewPool().maxActiveConnections = 20  # Set how many reusable conns to buffer in the pool
    print "Pooling 20 connections"

    #Connection parameters - Fill this info with your MySQL server connection parameters
    mysqlconn = PySQLPool.getNewConnection(username='******',
                                           password='******',
                                           host='server_host',
                                           db='database_name')

    print "Connected to MySQL"
    queryp = PySQLPool.getNewQuery(mysqlconn)

    # Wait for messages
    # This can be done through a callback ...
    while True:
        msg = yield queue.get()
        props = msg.content.properties
        pdu = pickle.loads(msg.content.body)

        if msg.routing_key[:15] == 'submit.sm.resp.':
            #print 'SubmitSMResp: status: %s, msgid: %s' % (pdu.status,
            #       props['message-id'])

            #Update a record in mysql according to your own table. This will fire upon receiving a PDU response.
            #Make sure you already have a matching sms record to update.

            queryp.Query(
                "UPDATE table_name SET status='%s' WHERE messageid='%s'" %
                (pdu.status, props['message-id']))
            PySQLPool.commitPool()  # Very important, always execute a commit; autocommit doesn't work well here

        elif msg.routing_key[:10] == 'submit.sm.':

            #print 'SubmitSM: from %s to %s, content: %s, msgid: %s' % (pdu.params['source_addr'],
            #       pdu.params['destination_addr'],
            #       pdu.params['short_message'],
            #       props['message-id'])

            # This will fire every time a message is sent to the SumbitSM queue.
            # Create a record with the messagesent msg

            queryp.Query(
                "INSERT INTO table_name (messageid,carrier,date,dst,src,status,accountcode,cost,sale,plan_name,amaflags,content) VALUES ('%s','Carrier',NOW(),'%s','%s','8','00000','0.0','0.0','plan_name','some_status','%s') "
                % (props['message-id'], pdu.params['destination_addr'],
                   pdu.params['source_addr'], pdu.params['short_message']))
            """
                The previous query works for the following table structure:
                    id INT primary_key auto_increment
                    messageid VARCHAR(128)
                    carrier VARCHAR
                    date DATETIME
                    dst VARCHAR(15)
                    src VARCHAR(15)
                    status VARCHAR(10)
                    accountcode INT
                    cost FLOAT
                    sale FLOAT
                    plan_name VARCHAR(25)
                    amaflags VARCHAR(10)
                    content VARCHAR(160)
                """

            PySQLPool.commitPool()  # Remember to Commit
        else:
            print 'unknown route'

    # A clean way to tear down and stop
    yield chan.basic_cancel("someTag")
    yield chan.channel_close()
    chan0 = yield conn.channel(0)
    yield chan0.connection_close()

    reactor.stop()
Example #8
from gevent import monkey

monkey.patch_all()
import zmq
import scipy.stats as stats
import numpy.ma as ma
import numpy as np
import PySQLPool
from config import config
from datetime import datetime
import time
import dateutil.parser

np.seterr(all='ignore')

PySQLPool.getNewPool().maxActiveConnections = 50

dbConn = PySQLPool.getNewConnection(user=config['username'],
                                    passwd=config['password'],
                                    db=config['db'],
                                    commitOnEnd=True)

# The maximum number of greenlet workers in the greenlet pool. This is not one
# per processor; a decent machine can support hundreds or thousands of greenlets.
# I recommend setting this to the maximum number of connections your database
# backend can accept, if you must open one connection per save op.
MAX_NUM_POOL_WORKERS = 300


def main():
    """
Example #9
#!/usr/bin/env python
from bottle import route, run, request, abort
import PySQLPool
from xml.dom.minidom import Document
from config import config
import locale
import pylibmc
import threading
import sys
import time

PySQLPool.getNewPool().maxActiveConnections = 100
PySQLPool.getNewPool().maxActivePerConnection = 1
mc = pylibmc.Client(["127.0.0.1"], binary=True, behaviors={"tcp_nodelay": True, "ketama": True})
pool = pylibmc.ClientPool()
db = PySQLPool.getNewConnection(user=config['username'],passwd=config['password'],db=config['db'])
locale.setlocale(locale.LC_ALL, 'en_US')
maxThreads = 60
pool.fill(mc, maxThreads + 10)
repoList = []
repoVal = {}

def repoThread():
    global repoList
    global repoVal
    while len(repoList) > 0:
        row = repoList.pop()
        regions = regionList()
        prices = getMineralBasket()
        refValue = ((row['Tritanium'] * prices['Tritanium']['sellavg']) +
        (row['Pyerite'] * prices['Pyerite']['sellavg']) +