def process(num, l, client, process_id):
    """Stress-test worker: repeatedly fetch one fixed ItemBase blob from redis
    and feed it to the Portrayal RPC server until the shared counter reaches
    100000.

    num        -- multiprocessing shared counter of issued requests
    l          -- multiprocessing lock guarding num
    client     -- redis client used to fetch the serialized ItemBase
    process_id -- worker id (only used by the commented-out debug print)
    """
    count = 0
    err_count = 0
    time_tracker = TimeTracker()
    rpc_con = PortrayalConnection("localhost", 10099, "Portayal")
    rpc_con.connect()
    pid = os.getpid()
    # Per-process log file keyed by pid.
    set_logger("%s/logs/stress_test_%s.log" % (cur_dir, pid))

    while True:
        if num.value >= 100000:
            break
        key_base = "Cjinshan>b1a4ee97f6b51c4f702948a9a1a303bd>ItemBase"
        try:
            ib_res = client.get(key_base)
            ib = ItemBase()
            ib.ParseFromString(ib_res)
            if not ib.cid:
                continue
        except Exception as e:
            print("get redis data key:%s  error:%s" % (key_base, e))
            continue
        # Increment the shared counter under the lock.
        with l:
            num.value = num.value + 1
        count = count + 1
        time_tracker.start_tracker()
        # BUG FIX: `result` was previously referenced below without ever being
        # initialized -- a failure on the very first RPC call raised NameError
        # at the reporting step, and any later failure silently reused a stale
        # result from a previous iteration.
        result = None
        try:
            result = rpc_con.analyze_protobuf(ib_res, None)
            #print("process_id:%s, count:%s" %(process_id, count))
        except Exception as e:
            err_count = err_count + 1
            print("get rpc server data key:%s error:%s" % (key_base, e))
        time_tracker.end_tracker()
        time_tracker.time_elapsed()
        time_tracker.inc_job_counter(1)
        job_count = time_tracker.get_job_count()
        if job_count % 1000 == 0:
            # Every 1000 jobs: decode and dump the latest result (only if the
            # RPC call actually succeeded this iteration), then log progress.
            if result is not None:
                module_path = result.modules_path
                ip = ItemProfile()
                ip.ParseFromString(result.value)
                ip_ = pb2json(ip)
                ip_json = json.loads(ip_)
                print(module_path)

            logging.info("avg_time:%s, request_num:%s, all_time:%s, err_count:%s thread_id:%s" % (time_tracker.average_time_cost(),
                  job_count, time_tracker.get_time_elapsed(), err_count, os.getpid()))
    # Final per-worker summary.
    avg_time = time_tracker.average_time_cost()
    request_num = time_tracker.get_job_count()
    all_time = time_tracker.get_time_elapsed()
    print("avg_time:%s, request_num:%s, all_time:%s, err_count:%s" % (avg_time, request_num, all_time, err_count))
def process(num,l, client, process_id, datas):
    """Stress-test worker: sample random redis keys from `datas` and issue
    get_info_data RPCs until the shared counter `num` reaches 50000.

    num        -- multiprocessing shared counter of issued requests
    l          -- multiprocessing lock guarding num
    client     -- redis client for fetching serialized ItemBase blobs
    process_id -- worker id (only used by the commented-out debug print)
    datas      -- non-empty list of candidate redis keys to sample from

    NOTE(review): this redefines the earlier module-level `process`; only this
    version remains visible after import -- confirm that is intentional.
    """
    count = 0
    err_count = 0
    time_tracker = TimeTracker()
    rpc_con = PortrayalConnection("localhost", 19090, "Portayal")
    rpc_con.connect()
    pid = os.getpid()
    # Per-process log file keyed by pid.
    set_logger("%s/logs/stress_test_%s.log" %(cur_dir, pid))
    # randint is inclusive on both ends, hence len - 1.
    data_len = len(datas) - 1

    while True:
        index = random.randint(0, data_len)
        key_base = datas[index]
        try:
            ib_res = client.get(key_base)
            ib = ItemBase()
            ib.ParseFromString(ib_res)
            if not ib.cid:
                continue
        except Exception as e:
            print("get redis data key:%s  error:%s" % (key_base, e))
            continue
        # Check-and-increment of the shared counter must happen atomically;
        # the lock is explicitly released on BOTH exits (break and fallthrough).
        l.acquire()
        if num.value >= 50000:
            l.release()
            break
        num.value = num.value + 1
        l.release()
        count = count + 1
        time_tracker.start_tracker()
        try:
            result = rpc_con.get_info_data(ib.cid, ib.iid, ReqType.ITEMBASE)
            print(ib.cid, ib.iid)
            #print("process_id:%s, count:%s" %(process_id, count))
        except Exception as e:
            err_count = err_count + 1
            # On RPC failure, tear down and re-establish the connection before
            # the next iteration.
            rpc_con.close()
            rpc_con = PortrayalConnection("localhost", 19090, "Portayal")
            rpc_con.connect()
            print("get rpc server data key:%s error:%s" %(key_base, e))
        time_tracker.end_tracker()
        time_tracker.time_elapsed()
        time_tracker.inc_job_counter(1)
        job_count = time_tracker.get_job_count()
    # Final per-worker summary.
    avg_time = time_tracker.average_time_cost()
    request_num = time_tracker.get_job_count()
    all_time = time_tracker.get_time_elapsed()
    print("avg_time:%s, request_num:%s, all_time:%s, err_count:%s" % (avg_time, request_num, all_time, err_count))
Esempio n. 3
0
# -*- coding: utf-8 -*-
__author__ = '*****@*****.**'
import MySQLdb
import logging
import time
from set_logger import set_logger

set_logger('logs/mysql.log')
class MySQLClient():
    """Thin MySQLdb wrapper that retries the initial connection forever.

    Autocommit is enabled and the session charset is forced to utf8.
    """

    def __init__(self, host, port, user, passwd, db, charset='utf8'):
        # Retry until the server is reachable; each failure is logged and
        # followed by a 10 second back-off.
        while True:
            try:
                # BUG FIX: arguments were previously passed positionally as
                # connect(host, user, passwd, db, port, charset), which binds
                # `charset` to MySQLdb's sixth positional parameter
                # (`unix_socket`) rather than the charset option.  Keyword
                # arguments make the binding explicit and correct.
                self.conn = MySQLdb.connect(host=host, user=user,
                                            passwd=passwd, db=db,
                                            port=port, charset=charset)
                self.conn.autocommit(True)
                self.cursor = self.conn.cursor()
                self.cursor.execute("SET NAMES utf8")
                logging.info('mysql established')
                break
            except Exception as e:
                logging.error('mysql error: %s', e)
                time.sleep(10)

    def close(self):
        """Close the cursor and the connection, logging (not raising) errors."""
        try:
            self.cursor.close()
            self.conn.close()
            logging.info('mysql closed')
        except Exception as e:
            logging.error('mysql close error %s: ',e)

Esempio n. 4
0
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging, traceback
import os, sys, uwsgi

import mobile_handler
import settings
import utils


#import log_conf
#logger = logging.getLogger("serverlog")

from set_logger import set_logger
set_logger(os.path.dirname(os.path.abspath(__file__)) + '/../log/server.log')

debug_flag = settings.DEBUG


def default_handler(request):
    """Fallback WSGI/uwsgi-style handler: log the request and extract the
    method name (the last path segment).

    NOTE(review): `status` and `header` are built but never used or returned
    in the visible code, and the function falls off the end with a bare
    Python-2 `print` -- this block looks truncated; confirm against the full
    source before relying on it.
    """
    status = '200 OK'
    header = [('Content-Type', 'text/html')]
    req_method = request['REQUEST_METHOD']
    path_info = request['PATH_INFO']
    #only for post
    env_param = utils.get_param(request)
    # Method name = everything after the last '/' in the path.
    method = path_info[path_info.rindex('/')+1:]
    logging.info("req_method=%s, path_info=%s, param=%s, method=%s", req_method, path_info, env_param, method)
    # get the method
    # method is '' or None
    print method
def setLogger():
    """Configure logging to the server log file; return set_logger's result."""
    log_path = "logs/server.log"
    return set_logger(log_path)
Esempio n. 6
0
        except Exception as e:
            logging.error("Error: host: %s \t func: %s \t content: %s \t ",
                          self.host, func, content)
            res = 0
        #logging.info("host: %s \t func: %s \t content: %s \t result: %s ",self.host, func, content, res)
        return res


##add usage:
def usage():
    """Print command-line usage for the server entry point."""
    # Parenthesized single-argument print behaves identically under Python 2
    # and Python 3; the original `print "..."` statement is Python-2-only.
    print("Usage:")
    print("python server.py port processNum.")


# Entry point: expects exactly two CLI arguments -- the listen port and the
# worker-process count -- otherwise print usage and do nothing.
if (len(sys.argv) != 3):
    usage()
else:
    set_logger('logs/server.log')
    # Thrift process-pool server wiring: handler -> processor -> server socket
    # with buffered transport framing and the binary protocol.
    handler = PythonServiceServer()
    processor = WeiboService.Processor(handler)
    transport = TSocket.TServerSocket('0.0.0.0', int(sys.argv[1]))
    tfactory = TTransport.TBufferedTransportFactory()
    pfactory = TBinaryProtocol.TBinaryProtocolFactory()
    server = TProcessPoolServer.TProcessPoolServer(processor, transport,
                                                   tfactory, pfactory)
    # One OS process per worker; sys.argv[2] controls the pool size.
    server.setNumWorkers(int(sys.argv[2]))

    print "Starting python server..."
    server.serve()  # blocks until the server is stopped
    print "done!"
import time


from portrayal_server.protobuf.pbjson import pb2json, json2pb
from portrayal_server.protobuf.ItemProfile_pb2 import ItemProfile, ItemBase
from thrift.transport import TSocket, TTransport
from thrift.protocol import TBinaryProtocol
from thrift.transport.TTransport import TBufferedTransport

from portrayal_server.interface.portrayal import PortrayalService
from portrayal_server.interface.portrayal.ttypes import *

from set_logger import set_logger

cur_dir=os.path.dirname(os.path.abspath(__file__)) or os.getcwd()
set_logger(cur_dir + "/logs/fun_test.log")


class Connection(object):
    """Base class holding a buffered thrift transport/protocol pair.

    Subclasses must implement connect() to actually open the transport.
    """

    def __init__(self, ip, port, name):
        self.ip = ip
        self.port = port
        self.name = name
        self.transport = TSocket.TSocket(ip, port)
        #self.transport.setTimeout(2000)
        # Wrap the raw socket in a buffered transport for efficient I/O.
        self.transport = TTransport.TBufferedTransport(self.transport)
        self.protocol = TBinaryProtocol.TBinaryProtocol(self.transport)

    def connect(self):
        """Abstract hook -- subclasses must override.

        Raises NotImplementedError (the idiomatic marker for an abstract
        method; the original raised a bare Exception, and callers catching
        Exception still catch this).
        """
        msg = "Connect must implement connect"
        raise NotImplementedError(msg)
import multiprocessing
import time


from  cost_tracker import TimeTracker
from portrayal_server.protobuf.pbjson import pb2json, json2pb
from portrayal_server.protobuf.ItemProfile_pb2 import ItemProfile, ItemBase
from bfd.harpc import client
from bfd.harpc.common import config
from portrayal_server.interface.portrayal import PortrayalService
from portrayal_server.interface.portrayal.ttypes import *

from set_logger import set_logger

cur_dir=os.path.dirname(os.path.abspath(__file__)) or os.getcwd()
set_logger(cur_dir + "/logs/stress_test.log")


pool = redis.ConnectionPool(host='172.18.1.101', port=6379, db=0)
redis_client = None


def init():
    """Per-process initializer: attach a Redis client to the shared pool."""
    global redis_client
    worker_client = redis.Redis(connection_pool=pool)
    redis_client = worker_client
    print("init succeed")

def process(num,l, process_id, rpc_con):
    global redis_client
    count = 0
    err_count = 0
Esempio n. 9
0
# -*- coding: utf-8 -*-
__author__ = '*****@*****.**'
import MySQLdb
import logging
import time
from set_logger import set_logger

set_logger('logs/mysql.log')
class MySQLClient():
    """Thin MySQLdb wrapper that retries the initial connection forever.

    Autocommit is enabled and the session charset is forced to utf8.

    NOTE(review): default credentials are hard-coded in the signature; move
    them to configuration/secrets management.
    """

    def __init__(self,host = '192.168.24.45', port = 3306,user = '******', passwd = 'qianfendian', db = 'weibo', charset = 'utf8'):
        # Retry until the server is reachable; each failure is logged and
        # followed by a 10 second back-off.
        while True:
            try:
                # BUG FIX: arguments were previously passed positionally as
                # connect(host, user, passwd, db, port, charset), which binds
                # `charset` to MySQLdb's sixth positional parameter
                # (`unix_socket`) rather than the charset option.  Keyword
                # arguments make the binding explicit and correct.
                self.conn = MySQLdb.connect(host=host, user=user,
                                            passwd=passwd, db=db,
                                            port=port, charset=charset)
                self.conn.autocommit(True)
                self.cursor = self.conn.cursor()
                self.cursor.execute("SET NAMES utf8")
                logging.info('mysql established')
                break
            except Exception as e:
                logging.error('mysql error: %s', e)
                time.sleep(10)

    def close(self):
        """Close the cursor and the connection, logging (not raising) errors."""
        try:
            self.cursor.close()
            self.conn.close()
            logging.info('mysql closed')
        except Exception as e:
            logging.error('mysql close error %s: ',e)

Esempio n. 10
0
            #print '>>>>d_res: ', d_res
            return json.dumps(d_res, ensure_ascii=False)

        except KeyError:
            logging.info('%s ERROR', json_text)
            return json.dumps([])


def usage():
    """Print command-line usage for the classification server entry point."""
    # Parenthesized single-argument print behaves identically under Python 2
    # and Python 3; the original `print "..."` statement is Python-2-only.
    print("Usage:")
    print("python server.py  processNum.")


# Entry point: expects exactly one CLI argument -- the worker-process count --
# otherwise print usage and do nothing.
if (len(sys.argv) != 2):
    usage()
else:
    set_logger(cur_dir + '/../logs/clf_server.log')
    # Thrift process-pool server wiring: handler -> processor -> server socket
    # with buffered transport framing and the binary protocol.  Listen address
    # and port come from the project config module.
    handler = ClfServiceHandler()
    processor = ClassificationService.Processor(handler)
    transport = TSocket.TServerSocket(config.IP, int(config.PORT))
    tfactory = TTransport.TBufferedTransportFactory()
    pfactory = TBinaryProtocol.TBinaryProtocolFactory()
    server = TProcessPoolServer.TProcessPoolServer(processor, transport,
                                                   tfactory, pfactory)
    # One OS process per worker; sys.argv[1] controls the pool size.
    server.setNumWorkers(int(sys.argv[1]))

    print "Starting classification server..."
    server.serve()  # blocks until the server is stopped
    print "Classification server started!"
Esempio n. 11
0
from set_logger import set_logger
from modulea.test1 import test1
from moduleb.test2 import test2
import logging
import logging.config

set_logger("test.log")

def test():
    """Emit one 'test' message at each of the info, debug and error levels."""
    for emit in (logging.info, logging.debug, logging.error):
        emit("test")
# Smoke-run the logging setup: emit from this module and from both submodules.
test()
test1()
test2()
Esempio n. 12
0
__author__ = '*****@*****.**'

import logging
from set_logger import set_logger

set_logger('logs/algo.log')

import sys
sys.path.append('../../thrift-api-def/gen-py')
from algo_bfd import WeiboService
from thrift import Thrift
from thrift.transport import TSocket
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol


class algoClient():
    def __init__(self,ip,port):
        """Open a buffered thrift connection to WeiboService at ip:port.

        NOTE(review): failures are logged and swallowed, leaving the instance
        half-initialized (self.transport / self.client may be unset).  Also,
        the protocol wraps the raw socket rather than the buffered transport
        that is opened -- presumably unintended; confirm against thrift usage.
        """
        try:
            #transport = TSocket.TSocket('localhost', sys.argv[1])
            transport = TSocket.TSocket(ip,port )
            self.transport = TTransport.TBufferedTransport(transport)
            protocol = TBinaryProtocol.TBinaryProtocol(transport)
            self.client = WeiboService.Client(protocol)
            self.transport.open()
            logging.info('algo start')
        except Exception as e:
            logging.error('algo error: %s',e)
    def close(self):
        try:
            self.transport.close()
Esempio n. 13
0
__author__ = '*****@*****.**'

import logging
from set_logger import set_logger

set_logger('logs/algo.log')

import sys
sys.path.append('../../thrift-api-def/gen-py')
from algo_bfd import WeiboService
from thrift import Thrift
from thrift.transport import TSocket
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol


class algoClient():
    def __init__(self, ip, port):
        """Open a buffered thrift connection to WeiboService at ip:port.

        NOTE(review): failures are logged and swallowed, leaving the instance
        half-initialized (self.transport / self.client may be unset).  Also,
        the protocol wraps the raw socket rather than the buffered transport
        that is opened -- presumably unintended; confirm against thrift usage.
        """
        try:
            #transport = TSocket.TSocket('localhost', sys.argv[1])
            transport = TSocket.TSocket(ip, port)
            self.transport = TTransport.TBufferedTransport(transport)
            protocol = TBinaryProtocol.TBinaryProtocol(transport)
            self.client = WeiboService.Client(protocol)
            self.transport.open()
            logging.info('algo start')
        except Exception as e:
            logging.error('algo error: %s', e)

    def close(self):
        try:
Esempio n. 14
0
        try:
            content = "".join(content.split("\n"))
            res = self.Review.predictPraise(str(content.strip()))
        except Exception as e:
            logging.error("Error: host: %s \t func: %s \t content: %s \t ",self.host, func, content)
            res = 0
        #logging.info("host: %s \t func: %s \t content: %s \t result: %s ",self.host, func, content, res)
        return res
     
##add usage:
def usage():
    """Print command-line usage for the server entry point."""
    # Parenthesized single-argument print behaves identically under Python 2
    # and Python 3; the original `print "..."` statement is Python-2-only.
    print("Usage:")
    print("python server.py port processNum.")

# Entry point: expects exactly two CLI arguments -- the listen port and the
# worker-process count -- otherwise print usage and do nothing.
if( len(sys.argv) != 3 ):
    usage()
else:
    set_logger('logs/server.log')
    # Thrift process-pool server wiring: handler -> processor -> server socket
    # with buffered transport framing and the binary protocol.
    handler = PythonServiceServer()
    processor = WeiboService.Processor(handler)
    transport = TSocket.TServerSocket('0.0.0.0',int(sys.argv[1]))
    tfactory = TTransport.TBufferedTransportFactory()
    pfactory = TBinaryProtocol.TBinaryProtocolFactory()
    server = TProcessPoolServer.TProcessPoolServer(processor, transport, tfactory, pfactory)
    # One OS process per worker; sys.argv[2] controls the pool size.
    server.setNumWorkers(int(sys.argv[2]))

    print "Starting python server..."
    server.serve()  # blocks until the server is stopped
    print "done!"