Example #1
def Get_values(hosts, Redis_Key, keys, history, preset, operation):
     RC.delete(Redis_Key)
     hosts = [str(host[0]) for host in hosts]
     method = 'history.get'
     RC.set('%s_time' % Redis_Key, time.strftime('%Y-%m-%d %H:%M',time.localtime()))
     def Run(host):
         try:
             values = {}
             for key in keys:
                 value = zabbix_api.GET_value(host, keys[key], method, history)
                 if value is not None:
                     if '.' in value:
                         value = float(value)
                     if operation == 'lt':
                         if int(value) < preset:
                             values[key] = value
                             RC.hset(Redis_Key, host, values)
                     if operation == 'gt':
                         if int(value) > preset:
                             values[key] = value
                             RC.hset(Redis_Key, host, values)
         except Exception as e:
             logging.error(e)
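     # fan the per-host checks out across four worker threads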
     pool = ThreadPool(4)
     pool.map_async(Run,hosts)
     pool.close()
     pool.join()
Example #2
def test_replication_after_partition(both_https_cluster):
    node1.query("truncate table test_table")
    node2.query("truncate table test_table")

    manager = PartitionManager()
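    # close() briefly partitions the two replicas and then heals the link;
    # concurrent inserts must survive the flaps and replicate fully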

    def close(num):
        manager.partition_instances(node1, node2, port=9010)
        time.sleep(1)
        manager.heal_all()

    def insert_data_and_check(num):
        node1.query(
            "insert into test_table values('2019-10-15', {}, 888)".format(num))
        time.sleep(0.5)

    closing_pool = Pool(1)
    inserting_pool = Pool(5)
    cres = closing_pool.map_async(close,
                                  [random.randint(1, 3) for _ in range(10)])
    ires = inserting_pool.map_async(insert_data_and_check, list(range(100)))

    cres.wait()
    ires.wait()

    assert_eq_with_retry(node1, "SELECT count() FROM test_table", '100')
    assert_eq_with_retry(node2, "SELECT count() FROM test_table", '100')
Example #3
def getvasprun(self, files):
     m = self.m
     maindir = pwd()
     if m.engine == "vasp":
         calculator = self.getVaspRun_vasp
     elif m.engine == "lammps":
         calculator = self.getVaspRun_lammps
     self.jm = jobManager()
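     # prepare one working directory per structure file and queue up a calculation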
     for file in files:
         print(file)
         dir = "dirs/dir_" + file
         mkdir(dir)
         mv(file, dir + '/POSCAR')
         cd(dir)
         calculator()
         cd(maindir)
     self.jm.run()
     if m.th:
         mkdir(m.pbsname)
         self.thcode(files, m.pbsname)
         cp("dirs", m.pbsname)
         passthru("tar zcf %s.tar.gz %s" % (m.pbsname, m.pbsname))
     print('start check')
     self.jm.check()
     if m.engine == "lammps1":
         from multiprocessing.dummy import Pool
         pool = Pool()
         pool.map_async(lammpsvasprun, files)
         pool.close()
         pool.join()
Example #4
def main():
    pool = ThreadPool(1000)  # be sure to set a thread count, or you only trip yourself up (think about why)
    print("Ready to start grabbing")
    pool.map_async(shopping, range(1000))
    pool.close()
    pool.join()
    print(f"剩余库存{count}")
Example #5
    def test_concurrent_writes(self):
        sender_pool = ThreadPool(100)
        receiver_pool = ThreadPool(100)

        message = can.Message(
            arbitration_id=0x123,
            extended_id=True,
            timestamp=121334.365,
            data=[254, 255, 1, 2]
        )
        workload = 1000 * [message]

        def sender(msg):
            self.bus1.send(msg)

        def receiver(_):
            result = self.bus2.recv(timeout=2.0)
            self.assertIsNotNone(result)
            self.assertEqual(result, message)

        sender_pool.map_async(sender, workload)
        receiver_pool.map_async(receiver, len(workload) * [None])

        sender_pool.close()
        sender_pool.join()
        receiver_pool.close()
        receiver_pool.join()
Example #6
def main():
    p = ThreadPool()
    p.map_async(test, list(range(5)))
    p.close()
    p.join()

    print(num)  # should be 500000, but the unsynchronized updates race and the total comes up short
Example #7
    def get_past_data(self, thread=10):
        self.c1, self.c2, self.conn1, self.conn2 = self.db_init()
        print("正在获取基金的历史数据")
        # 获取所有的基金的code
        codes = self.get_fund_earning_perday()
        self.c1.execute(self.create_info_table)
        code = self.code_split(codes, 100)
        for i in code:
            # multi-threaded crawl
            print("Crawling historical data with", thread, "threads")
            pool = ThreadPool(thread)
            pool.map_async(self.detail, i)

            # start the database-insert thread
            #p = Thread(target=write2sql)
            #p.start()

            pool.close()
            pool.join()
            #p.join()
            self.write2sql()
            self.sql1 = []
            self.sql2 = []

        # close the database connections
        self.close_db()
Example #8
def __processing(processArray):
    # init global vars
    global cur
    global curPercent
    global countServer
    global countOpen
    global once
    global PLOCK
    global TLOCK

    # split the host list in number of threads
    threadHostArray = array_split(processArray, this.threads)

    # init threading pool
    tPool = ThreadPool(this.threads)
    try:
        # start threading pool
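        # the huge timeout on get() keeps the wait interruptible by Ctrl+C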
        tPool.map_async(__threading, threadHostArray).get(9999999)
    except KeyboardInterrupt:
        PLOCK.acquire()
        if once.value == 0:
            once.value += 1
            print('\n\n[!] killing all python processes')
            # use pkill to make sure every thread is killed
            os.system('pkill python')
            sys.exit(0)
    # close threading pool
    tPool.close()
    tPool.join()
Example #9
def test_exception_message(started_cluster):
    assert node1.query("select number from nums order by number") == "0\n1\n"

    def node_busy(_):
        for i in range(10):
            node1.query("select sleep(2)", user="******", ignore_error=True)

    busy_pool = Pool(3)
    busy_pool.map_async(node_busy, range(3))
    time.sleep(1)  # wait a little until polling starts

    with pytest.raises(Exception) as exc_info:
        for i in range(3):
            assert (node1.query(
                "select number from remote('node1', 'default', 'nums')",
                user="******",
            ) == "0\n1\n")
    exc_info.match("Too many simultaneous queries for all users")

    for i in range(3):
        assert (node1.query(
            "select number from remote('node1', 'default', 'nums')",
            user="******",
            settings={"max_concurrent_queries_for_all_users": 0},
        ) == "0\n1\n")
Example #10
def mysql_scheduler():
    t = time.strftime('%Y-%m-%d', time.localtime())
    MYSQL = Mysql.MYSQL(USER, PASSWORD, HOST, PORT, DB)

    def Run_sql(val):
        id, IP, PORT, DB = val[:4]
        CMD = val[5]
        val = Mysql.Query_sql(IP, PORT, DB, CMD)
        if val:
            val = str(val).replace("'", '')
        else:
            val = 'None'
        cmd = "update sql_scheduler set status = '已执行' ,results = '%s' where id = '%s';" % (
            val, id)
        loging.write(cmd, log_path=log_path)
        MYSQL.Run(cmd)
        MYSQL.Close()

    try:
        cmd = "select * from sql_scheduler where status = '未执行' and time = '%s';" % t
        values = MYSQL.Run(cmd)
        MYSQL.Close()
        if values:
            # run the SQL jobs concurrently
            POOLS = Third_pool(10)
            POOLS.map_async(Run_sql, values)
            POOLS.close()
            POOLS.join()
    except Exception as e:
        loging.write(e, log_path=log_path)
Example #11
def main():
    p = ThreadPool()
    p.map_async(test, list(range(5)))
    p.close()
    p.join()

    print(f"{current_process().name}")
Example #12
    def handle_download_picture(self):
        '''Main entry point for downloading pictures.'''

        print("Entering picture download routine...")
        picture_list = []
        conn = sqlite3.connect('iwatu.db')
        cursor = conn.cursor()
        sql = 'select dlink,title,referer from dlink where log=0'
        picture_list = cursor.execute(sql).fetchall()
        if cursor: cursor.close()
        if conn:
            conn.commit()
            conn.close()

        if len(picture_list) != 0:
            print("Download in progress, please wait...")
            pool = ThreadPool(self.ThreadPool)
            '''
			for each_tuple in picture_list:
				print("Adding new task to subprocess (parent pid %s): %s" % (os.getpid(), each_tuple[0]))
				# per the SQL query above, the tuple holds dlink at 0, title at 1, referer at 2
				p.apply_async(self.multi_handle_download_picture, args=(each_tuple,))
			'''
            pool.map_async(self.multi_handle_download_picture, picture_list)
            pool.close()
            pool.join()
            print("All Done!")
        else:
            print("No picture link records found to download")
Example #13
    def handle_subjectpage(self, single_subjectpage=None):
        '''Subject-page handler: extracts the address of every picture.'''

        print("Entering subject-page handler...")
        targeturl = []
        # first, add any task passed in through the parameter
        if single_subjectpage is not None:
            targeturl.append(single_subjectpage)
        # then pull in every unprocessed subject from the database as well
        conn = sqlite3.connect('iwatu.db')
        cursor = conn.cursor()
        result = cursor.execute(
            'select * from urls where log=0 and competence=1 and domain=?',
            (self.domain, )).fetchall()
        old_exists_list = []
        for each_tuple in result:
            old_exists_list.append(each_tuple[0])
        targeturl.extend(
            old_exists_list
        )  # extend flattens its argument: a list is broken into its items, a string into single characters
        if cursor: cursor.close()
        if conn:
            conn.commit()
            conn.close()

        if len(targeturl) > 0:
            # create the thread pool and start processing
            print("Analysis in progress, please wait...")
            pool = ThreadPool(self.ThreadPool)
            pool.map_async(self.multi_handle_subjectpage, targeturl)
            pool.close()
            pool.join()
            print("All Done!")
        else:
            print("No subject pages to process")
Example #14
    def handle_pagelist(self, targeturl, x=None, y=0):
        '''List-page handler: extracts subject-page addresses and hands them to the subject-page handler.'''

        print("Entering list-page handler...")

        list_urls = []  # container for the links about to be processed

        # decide whether the task covers one list page or many
        if targeturl.find('*') != -1:
            if x is None:
                x = 1
                print("Multi-page mode requested but no start page given; using default 1")
            urlhandler = targeturl.split('*')  # split the original URL at the '*' marker
            starturl = urlhandler[0] + str(x) + urlhandler[1]  # build the task's first list-page address
            list_urls.append(starturl)  # and add it to the link list
            while x <= y:  # build every page link the user asked for
                x = x + 1
                mission_url = urlhandler[0] + str(x) + urlhandler[1]
                list_urls.append(mission_url)  # generate each link and add it to the list
        else:
            list_urls.append(targeturl)  # single-page task: append the URL itself (extend would split the string into characters)

        if len(list_urls) > 0:
            print("Analysis in progress, please wait...")
            pool = ThreadPool(self.ThreadPool)
            pool.map_async(self.multi_handle_pagelist, list_urls)
            pool.close()
            pool.join()  # remember: calling a method needs parentheses!
            print("All Done!")
        else:
            print("No list pages to process")
Example #15
def __start(ipList, domainList, threads):
	this.ipList = ipList
	this.domainList = domainList
	this.logFile = open("log/load"+str(time.time())+".log","w")
	this.end = len(this.ipList)
	this.cur = 0
	this.packets = {}
	
	fileName = 'conf/mdb.txt'
	with open(fileName) as lines:
		this.creds = []
		for line in lines:
			line = line.replace('\n', '')
			this.creds.append(line)
	
	pool = ThreadPool(int(threads)) 
	try:
		if this.mode == "dns":
			pool.map_async(__threadDNS, this.ipList).get(9999999)
		else:
			pool.map_async(__threadOther, this.ipList).get(9999999)
	except KeyboardInterrupt:
		print "\n[!] we killed killed all python processes for you"
		os.system('pkill python')
		sys.exit(0)
	pool.close()
	pool.join()
	
	this.logFile.close()
Example #16
def thread_start(self, thread_run, args):
     if len(args) > 0:
         self.preAction(args)
         pool = Pool(self.cpu)
         pool.map_async(thread_run, args)
         pool.close()
         pool.join()
Example #17
    def test_concurrent_writes(self):
        sender_pool = ThreadPool(100)
        receiver_pool = ThreadPool(100)

        message = can.Message(
            arbitration_id=0x123,
            channel=self.CHANNEL_1,
            is_extended_id=True,
            timestamp=121334.365,
            data=[254, 255, 1, 2]
        )
        workload = 1000 * [message]

        def sender(msg):
            self.bus1.send(msg)

        def receiver(_):
            return self.bus2.recv(timeout=2.0)

        sender_pool.map_async(sender, workload)
        for msg in receiver_pool.map(receiver, len(workload) * [None]):
            self.assertIsNotNone(msg)
            self.assertEqual(message.arbitration_id, msg.arbitration_id)
            self.assertTrue(message.equals(msg, timestamp_delta=None))

        sender_pool.close()
        sender_pool.join()
        receiver_pool.close()
        receiver_pool.join()
Example #19
def main():
    p = ThreadPool()
    p.map_async(test, list(range(5)))
    p.close()
    p.join()

    print(num)
Example #20
def mutil_brute(self, _data_list: list):
     pool = Pool(20)
     pool.map_async(ServiceBrute.brute_main,
                    _data_list,
                    callback=self.CallBackFunc)
     pool.close()
     pool.join()
Example #21
    def Get_values(hosts, Redis_Key, keys, history, preset, operation):
        RC.delete(Redis_Key)
        hosts = [str(host[0]) for host in hosts]
        method = 'history.get'
        RC.set('%s_time' % Redis_Key,
               time.strftime('%Y-%m-%d %H:%M', time.localtime()))

        def Run(host):
            try:
                values = {}
                for key in keys:
                    value = zabbix_api.GET_value(host, keys[key], method,
                                                 history)
                    if value is not None:
                        if '.' in value:
                            value = float(value)
                        if operation == 'lt':
                            if int(value) < preset:
                                values[key] = value
                                RC.hset(Redis_Key, host, values)
                        if operation == 'gt':
                            if int(value) > preset:
                                values[key] = value
                                RC.hset(Redis_Key, host, values)
            except Exception as e:
                logging.error(e)

        pool = ThreadPool(4)
        pool.map_async(Run, hosts)
        pool.close()
        pool.join()
Example #22
    def test_concurrent_writes(self):
        sender_pool = ThreadPool(100)
        receiver_pool = ThreadPool(100)

        message = can.Message(arbitration_id=0x123,
                              extended_id=True,
                              timestamp=121334.365,
                              data=[254, 255, 1, 2])
        workload = 1000 * [message]

        def sender(msg):
            self.bus1.send(msg)

        def receiver(_):
            result = self.bus2.recv(timeout=2.0)
            self.assertIsNotNone(result)
            self.assertEqual(result, message)

        sender_pool.map_async(sender, workload)
        receiver_pool.map_async(receiver, len(workload) * [None])

        sender_pool.close()
        sender_pool.join()
        receiver_pool.close()
        receiver_pool.join()
Example #23
def test_concurrent_queries(started_cluster):
    conn = get_postgres_conn(started_cluster, started_cluster.postgres_ip,
                             True)
    cursor = conn.cursor()

    node1.query('''
        CREATE TABLE test_table (key UInt32, value UInt32)
        ENGINE = PostgreSQL('postgres1:5432', 'clickhouse', 'test_table', 'postgres', 'mysecretpassword')'''
                )

    cursor.execute('CREATE TABLE test_table (key integer, value integer)')

    prev_count = node1.count_in_log('New connection to postgres1:5432')

    def node_select(_):
        for i in range(20):
            result = node1.query("SELECT * FROM test_table", user='******')

    busy_pool = Pool(20)
    p = busy_pool.map_async(node_select, range(20))
    p.wait()
    count = node1.count_in_log('New connection to postgres1:5432')
    print(count, prev_count)
    # 16 is default size for connection pool
    assert (int(count) <= int(prev_count) + 16)

    def node_insert(_):
        for i in range(5):
            result = node1.query(
                "INSERT INTO test_table SELECT number, number FROM numbers(1000)",
                user='******')

    busy_pool = Pool(5)
    p = busy_pool.map_async(node_insert, range(5))
    p.wait()
    result = node1.query("SELECT count() FROM test_table", user='******')
    print(result)
    assert (int(result) == 5 * 5 * 1000)

    def node_insert_select(_):
        for i in range(5):
            result = node1.query(
                "INSERT INTO test_table SELECT number, number FROM numbers(1000)",
                user='******')
            result = node1.query("SELECT * FROM test_table LIMIT 100",
                                 user='******')

    busy_pool = Pool(5)
    p = busy_pool.map_async(node_insert_select, range(5))
    p.wait()
    result = node1.query("SELECT count() FROM test_table", user='******')
    print(result)
    assert (int(result) == 5 * 5 * 1000 * 2)

    node1.query('DROP TABLE test_table;')
    cursor.execute('DROP TABLE test_table;')

    count = node1.count_in_log('New connection to postgres1:5432')
    print(count, prev_count)
    assert (int(count) <= int(prev_count) + 16)
Example #24
def _start(self):
     try:
         print '-' * 60
         print u'{}[-] : {}{} '.format(self.O,
                                             socket.gethostbyname(self.target), self.W)
         print '-' * 60
         # thread count
         pool = ThreadPool(processes=100)
         # pass a timeout to get() so Ctrl+C can be caught
         pool.map_async(self.run, self.ports).get(0xffff)
         pool.close()
         pool.join()
         fo = open("./file/result/Port_scan.txt","w")
         for i in list_res:
             fo.write(i)
             fo.write('\n')
         fo.close()
         print '-' * 60
         print u'{}[-] : {} .{}'.format(self.O,
                                               time() - self.time, self.W)
     except Exception as e:
         print e
     except KeyboardInterrupt:
         print self.R + u'\n[-] ...'
         sys.exit(1)
Example #25
def parallel_run(func, params, count=0, thread=False, monitor_func=None):
    from multiprocessing import cpu_count
    from multiprocessing import Pool
    if count == 0:
        count = cpu_count()
    if count >= 4:
        count = count * 2
    if thread:
        from multiprocessing.dummy import Pool as ThreadPool
        pool = ThreadPool(count)
    else:
        pool = Pool(count)
    sync = True
    if monitor_func is not None:
        sync = False
    ret = []
    if sync:
        ret = pool.map(func, params)
    else:
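        # async path: collect results through the callback so the caller's
        # monitor_func(params) can observe progress while the workers run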

        def over_callback(_rets):
            params.clear()
            for _ret in _rets:
                ret.append(_ret)

        pool.map_async(func, params, callback=over_callback)
    import time
    time.sleep(0.5)
    if monitor_func is not None:
        monitor_func(params)
    pool.close()
    pool.join()
    return ret
Example #26
def test_concurrent_queries(started_cluster):
    conn = get_postgres_conn(started_cluster.postgres_ip, started_cluster.postgres_port, database=False)
    cursor = conn.cursor()
    database_name = 'concurrent_test'

    cursor.execute(f'DROP DATABASE IF EXISTS {database_name}')
    cursor.execute(f'CREATE DATABASE {database_name}')
    conn = get_postgres_conn(started_cluster.postgres_ip, started_cluster.postgres_port, database=True, database_name=database_name)
    cursor = conn.cursor()
    cursor.execute('CREATE TABLE test_table (key integer, value integer)')

    node1.query(f'''
        CREATE TABLE test.test_table (key UInt32, value UInt32)
        ENGINE = PostgreSQL(postgres1, database='{database_name}', table='test_table')
    ''')

    node1.query(f'''
        CREATE TABLE test.stat (numbackends UInt32, datname String)
        ENGINE = PostgreSQL(postgres1, database='{database_name}', table='pg_stat_database')
    ''')

    def node_select(_):
        for i in range(20):
            result = node1.query("SELECT * FROM test.test_table", user='******')

    def node_insert(_):
        for i in range(20):
            result = node1.query("INSERT INTO test.test_table SELECT number, number FROM numbers(1000)", user='******')

    def node_insert_select(_):
        for i in range(20):
            result = node1.query("INSERT INTO test.test_table SELECT number, number FROM numbers(1000)", user='******')
            result = node1.query("SELECT * FROM test.test_table LIMIT 100", user='******')

    busy_pool = Pool(30)
    p = busy_pool.map_async(node_select, range(30))
    p.wait()

    count = int(node1.query(f"SELECT numbackends FROM test.stat WHERE datname = '{database_name}'"))
    print(count)
    assert(count <= 18)

    busy_pool = Pool(30)
    p = busy_pool.map_async(node_insert, range(30))
    p.wait()

    count = int(node1.query(f"SELECT numbackends FROM test.stat WHERE datname = '{database_name}'"))
    print(count)
    assert(count <= 18)

    busy_pool = Pool(30)
    p = busy_pool.map_async(node_insert_select, range(30))
    p.wait()

    count = int(node1.query(f"SELECT numbackends FROM test.stat WHERE datname = '{database_name}'"))
    print(count)
    assert(count <= 18)

    node1.query('DROP TABLE test.test_table;')
    node1.query('DROP TABLE test.stat;')
Example #27
class MyPool:
    def __init__(self, n=5):
        self.queue = Queue()
        self.pool = Pool(n)
        self.n = n

    def start(self):
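        # each of the n workers loops forever in run(), pulling jobs off the shared queue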
        self.pool.map_async(self.run, range(self.n))

    def stop(self):
        self.pool.close()
        self.pool.join()

    def run(self, i):
        while True:
            try:
                req = self.queue.get(timeout=1)
                self.process(req, i)
                self.queue.task_done()
            except Empty:
                pass
            except:
                traceback.print_exc()
                self.queue.task_done()

    @property
    def count(self):
        return self.queue.unfinished_tasks

    def put(self, req):
        self.queue.put(req)

    def process(self, req, i):
        pass
Example #28
def update_advalue(update=False):
    ADValueTable.metadata.create_all(db)
    KDJValueTable.metadata.create_all(db)
    # idlist = sc.get_sza_index(index=0)
    # idlist.extend(sc.get_sha_index(index=0))
    idlist = sc.get_china_stock_list(index=0)
    # idlist = ['600777']
    # lock = threading.Lock()
    # for num in idlist:
    #     while threading.activeCount() >= 10:
    #         time.sleep(10)
    #     th = ADValueThread(lock, num, update)
    #     th.setDaemon(True)
    #     th.start()
    # for t in threading.enumerate():
    #     try:
    #         t.join()
    #     except Exception as ex:
    #         logger.debug('%s' % str(ex))
    #         continue
    lock = multiprocessing.Lock()
    tp = ThreadPool(10)
    # lock = multiprocessing.Manager().Lock()
    # tp = multiprocessing.Pool()
    tp.map_async(advalueprocess,
                 [(lock, num, update) for num in idlist]).get(36000)
    tp.close()
    tp.join()
Example #29
    def run_steps(self, steps=1):
        self.steps = steps

        pool = ThreadPool()
        pool.map_async(self._run, self.planners)

        pool.close()
        pool.join()
Example #31
def test4():
    n = 0

    def test5(i):
        nonlocal n  # without this, n += i raises UnboundLocalError in the worker
        n += i

    tpool = TheaderPool(processes=1)
    tpool.map_async(test5, range(100000))
    tpool.close()
    tpool.join()
Example #32
def main():
    p = ThreadPool()
    # two producers
    p.map_async(Shop, range(2))
    # five consumers
    p.map_async(User, range(5))
    p.close()
    p.join()
Example #33
def BuilTable():
    """
    Construit la table. Utilise du multithreading
    """

    print("Construction de la table en cours")

    global alphabet
    global table701
    global temp
    global doc

    temp = [[]] * 701
    table701 = [deepcopy(temp) for i in range(0, 359)]

    f = open("word2.txt")
    temp = f.readlines()
    f.close()

    pool = ThreadPool(16)

    a = time.time()

    doc = []
    pool.map(constrDoc, temp)
    pool.close()
    pool.join()

    print(time.time() - a)
    print("Mots recuperes du fichier")

    pool = ThreadPool(16)

    alphabet = []
    pool.map(constrAlphabet, doc)
    pool.close()
    pool.join()

    a = time.time()

    alphabet = uniq(alphabet)

    print(time.time() - a)

    print("Alphabet construit")

    a = time.time()

    pool1 = ThreadPool(16)

    pool1.map_async(constrTable, doc)
    pool1.close()
    pool1.join()

    print(time.time() - a)

    print("Table construite")
Example #34
    def execute_moves(self, moves):
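        # each move is a (callable, *args) tuple; act() unpacks and invokes it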
        def act(args):
            args[0](*args[1:])

        pool = ThreadPool(len(moves))
        pool.map_async(act, moves)

        pool.close()
        pool.join()
Example #35
def main():
    from multiprocessing.dummy import Pool
    os.chdir(os.path.join(os.path.dirname(__file__), os.pardir))
    pool = Pool()
    tasklist = parse_emakefile('Emakefile')
    pool.map_async(do_make, tasklist).get(0xfff)
    pool.close()
    pool.join()
    print 'Build succeeded!'
Example #37
def test4():
    n = 0

    def test5(i):
        nonlocal n  # without this, n += i raises UnboundLocalError in the worker
        n += i

    tpool = TheaderPool(processes=1)
    tpool.map_async(test5, range(1000000))
    tpool.close()
    tpool.join()
Example #38
def get_taobao_seller(keywords):
    # crawl shop records for the requested counts
    def get_seller_from_num(nums):
        url = "https://shopsearch.taobao.com/search?data-key=s&data-value={0}&" \
              "ajax=true&_ksTS=1481770098290_1972&callback=jsonp602&app=shopsearch&q={1}&js=1&isb=0".format(
            nums, keywords)
        # url = "https://shopsearch.taobao.com/search?data-key=s&data-value={0}&ajax=true&callback=jsonp602&app=shopsearch&q={1}".format(nums,keywords)
        content = requests.get(url)
        wbdata = content.text[11:-2]
        data = json.loads(wbdata)
        shop_list = data['mods']['shoplist']['data']['shopItems']
        for s in shop_list:
            name = s['title']  # shop name
            nick = s['nick']  # seller nickname
            nid = s['nid']  # shop ID
            provcity = s['provcity']  # shop region
            shopUrl = s['shopUrl']  # shop URL
            totalsold = s['totalsold']  # number of items in the shop
            procnt = s['procnt']  # shop sales count
            startFee = s['startFee']  # unknown
            mainAuction = s['mainAuction']  # shop keywords
            userRateUrl = s['userRateUrl']  # user rating URL
            dsr = json.loads(s['dsrInfo']['dsrStr'])
            goodratePercent = dsr['sgr']  # positive-feedback rate
            srn = dsr['srn']  # shop level
            category = dsr['ind']  # shop category
            mas = dsr['mas']  # matches-description score
            sas = dsr['sas']  # service-attitude score
            cas = dsr['cas']  # shipping-speed score
            data = {
                'name': name,
                'nick': nick,
                'nid': nid,
                'provcity': provcity,
                'shopUrl': shopUrl,
                'totalsold': totalsold,
                'procnt': procnt,
                'startFee': startFee,
                'goodratePercent': goodratePercent,
                # 'mainAuction':mainAuction,
                'userRateUrl': userRateUrl,
                'srn': srn,
                'category': category,
                'mas': mas,
                'sas': sas,
                'cas': cas
            }
            print(data)
            print("插入数据成功")

    pool = TheaderPool(processes=1)
    pool.map_async(get_seller_from_num, range(0, 10020, 20))
    pool.close()
    pool.join()
Example #39
	def run(self):
		try:
			pool = ThreadPool(processes=self.thread)
			pool.map_async(self.start, self.ips).get(0xffff)
			pool.close()
			pool.join()
		except Exception as e:
			pass
		except KeyboardInterrupt:
			print u'\n{}[-] scan aborted by user...{}'.format(R, W)
			sys.exit(1)
		finally:
			print '-'*55
			print u'{}[+] scan finished in {} seconds.{}'.format(O, time.time()-self.time, W)
Example #40
    def main(self):
        self.s = self.login()
        self.keyword = input('Enter a keyword for the image search: ')  # raw_input(u'...') to accept Chinese input?
        print(self.keyword)
        rootPath = 'E:\ProgramCode\Python\pixivStar\\'+self.keyword
        if not os.path.exists(rootPath):
            os.mkdir(rootPath)  # TODO: make the path configurable
        # TODO: arg parameter, bookmark count
        #get_search(keyword)
        searchUrl = self.index_url + '/search.php?s_mode=s_tag&word='+self.keyword+'&order=date_d&p='+str(1)
        respo = self.s.get(searchUrl, headers=self.header)
        content1 = respo.content.decode('utf-8')
        '''with open('contentExample.txt', 'w', encoding='utf-8') as f:
            print(content1,f)'''
        descriPat = re.compile('<meta name="description" content="(.*?)">', re.S)
        descri = re.findall(descriPat, content1)[0]
        print(descri)
        amountPat = re.compile('count-badge">(.*?)\u4ef6</span>', re.S)
        amount = int(re.findall(amountPat, content1)[0]) // 20
        print('Pages: ' + str(amount))
        '''self.thumbHeader = {'Host':'i2.pixiv.net', #host i1 ~ i4
            'If-Modified-Since':'Thu, 11 Aug 2016 01:15:38 GMT',
            'Referer':searchUrl,
            'User-Agent':'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.106 Safari/537.36}'}

        signCheckPat = re.compile('signup-sina-button"(.*?)</button>')
        sign = re.findall(signCheckPat, content1)
        print(len(sign))'''
        '''signCheckPat2 = re.compile('bookmarks"(.*?)</li>')
        sign2 = re.findall(signCheckPat2, content1)
        print(len(sign2))'''
        # TODO: two search modes
        #'s_mode=' 's_tag'  # tag
        #'s_tc'  # title and description

        pool = ThreadPool(8)
        urls = []
        for page in range(1, amount):
            urls.append(self.index_url + '/search.php?word='+self.keyword+'&order=date_d&p='+str(page))
            #self.get_search(self.index_url + 'search.php?s_mode=s_tag&word='+self.keyword+'&order=date_d&p='+str(page))
        pool.map_async(self.get_search, urls)
        pool.close()
        pool.join()
        
        
        print('--mission complete--')
        #sys.stdout = io.TextIOWrapper(sys.stdout.buffer,encoding='utf-8')
Example #41
def multiRunuser():
    pool = ThreadPool(cpu_count() * 8)
    global ip_list
    global results
    results = pool.map_async(runuser, ip_list)
    pool.close()
    pool.join()
Example #42
class BaseLogger:
    
    def __init__(self, outfile ='baselogger.log', sockets=[]):
        """
        A basic logging system.
        :param outfile: The file the logging output will be directed to.
        :param sockets: A list of external sockets who wish to be notified
        
        """
        self.outfile = outfile
        self.fh = open(outfile, 'a')
        self.sockets = sockets
        self.level_text = ['INFO', 'WARNING', 'ERROR']
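        # one worker thread for file writes, plus one when sockets need notifying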
        self.pool = Pool(2 if self.sockets else 1)
    
            
    def Log(self, message, level=0):
        """
        Fires off a job to write log information to file and the listening sockets.
        """
        timestamp = strftime("%Y-%m-%d %H:%M:%S")
        format_message = "# {0} [{1}]\t: {2}".format(timestamp, self.level_text[level], message)
        if level == 1:
            color = Color.YELLOW
        elif level == 2:
            color = Color.RED
        else:
            color = Color.GREEN
            
            
        sys.stdout.write(color + format_message + Color.END + "\n")
        self.pool.map_async(_log_fs, [(self.fh, format_message)] )
        self.pool.map_async(_log_socket, [(s, format_message) for s in self.sockets])
        
      
    def destroy(self):
        """
        Clean up resources. Closes all open files and sockets.
        """
        self.Log("Destroying logging system.", level=1)
        self.pool.close()
        # continue until all queued log operations have finished
        self.pool.join()
        self.fh.close()
        for s in self.sockets:
            s.close()
Example #43
def main():
    #TODO clean Logs
    parser = argparse.ArgumentParser(prog='scan.py', description='Pentesting Scan')
    parser.add_argument('-t', dest='TARGET', required=True, help='192.168.1.0/24 or 192.168.1.0-128') #required
    parser.add_argument('-d', dest='DIR', default='results', help='specifiy output-directory')#optional
    args = parser.parse_args()
    
    global logdir
    logdir = args.DIR
    
    checkEnv(args.DIR)
    global targets
    targets = netscan(args.TARGET)
    global db
    db=dict()
    
 
    portscanPool = Pool(4)
    portscanResults = portscanPool.map_async(portscan, targets)
    portscanPool.close()
    portscanPool.join()
    
    ipServiceList= []
    
    
    for ip in db.keys():
        if len(db[ip][1]) == 0 and len(db[ip][0]) == 0:
            print "[INFO ] no open ports in %s, delete key and remove dir" % ip
            del db[ip]
            removeEmptyEnv(ip)
        else:
            for s in range(len(db[ip][1])):
                ipServiceList.append([ip, s])
                
    
    servicePool = Pool(4)
    serviceResults = servicePool.map_async(checkService, ipServiceList)
    servicePool.close()
    servicePool.join()
    
    print "[ END ] "+"="*35+" [ END ]"
Example #44
class IoTaskThreadPool(object):
    '''
       thread pool for io operations
    '''
    def __init__(self, poolsize):
        self.ioPool = ThreadPool(poolsize)

    def execTasks(self, ioFunc, ioParams):
        if not ioParams or len(ioParams) == 0:
            return []
        return self.ioPool.map(ioFunc, ioParams)

    def execTasksAsync(self, ioFunc, ioParams):
        if not ioParams or len(ioParams) == 0:
            return []
        self.ioPool.map_async(ioFunc, ioParams)

    def close(self):
        self.ioPool.close()

    def join(self):
        self.ioPool.join()
Example #45
def update_signal(update=False):
    SignalTable.metadata.create_all(db)
    id_list = sc.get_sha_index(index=0)
    id_list.extend(sc.get_sza_index(index=0))
    # lock = threading.Lock()
    # for num in id_list:
    #     while threading.activeCount() >= 10:
    #         time.sleep(10)
    #     th = SignalThread(lock, num, db, update)
    #     th.setDaemon(True)
    #     th.start()
    # for t in threading.enumerate():
    #     try:
    #         t.join()
    #     except Exception as ex:
    #         logger.debug('%s' % str(ex))
    #         continue
    lock = multiprocessing.Lock()
    tp = ThreadPool(10)
    tp.map_async(signalprocess,
                 [(lock, num, update) for num in id_list]).get(36000)
    tp.close()
    tp.join()
Example #46
    def _start_nodes_parallel(self, nodes, max_thread_pool_size):
        """
        Start the nodes using a pool of multiprocessing threads for speed-up.

        Return set of nodes that were actually started.
        """
        # Create one thread for each node to start
        thread_pool_size = min(len(nodes), max_thread_pool_size)
        thread_pool = Pool(processes=thread_pool_size)
        log.debug("Created pool of %d threads", thread_pool_size)

        # pressing Ctrl+C flips this flag, which in turn stops the main loop
        # down below
        keep_running = True

        def sigint_handler(signal, frame):
            """
            Makes sure the cluster is saved, before the sigint results in
            exiting during node startup.
            """
            log.error(
                "Interrupted: will save cluster state and exit"
                " after all nodes have started.")
            keep_running = False
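            # (note: this assignment rebinds a name local to the handler;
            # without a mutable holder the outer flag never actually flips)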

        # intercept Ctrl+C
        with sighandler(signal.SIGINT, sigint_handler):
            result = thread_pool.map_async(self._start_node, nodes)
            while not result.ready():
                result.wait(1)
                # check if Ctrl+C was pressed
                if not keep_running:
                    log.error("Aborting upon user interruption ...")
                    # FIXME: `.close()` will keep the pool running until all
                    # nodes have been started; should we use `.terminate()`
                    # instead to interrupt node creation as soon as possible?
                    thread_pool.close()
                    thread_pool.join()
                    self.repository.save_or_update(self)
                    # FIXME: should raise an exception instead!
                    sys.exit(1)

            # keep only nodes that were successfully started
            return set(node for node, ok
                       in itertools.izip(nodes, result.get()) if ok)
Example #47
class Collector(Singleton):

    def __init__(self):
        self.redis = RedisOperation()
        self.redis.connect()
        self.pool = ThreadPool(10)
        self.pid = os.getpid()
        self.pname = multiprocessing.current_process().name

    def dispatch_work(self, task_data):
        group = task_data.get("group")
        urls = task_data.get("urls")
        if 0 == len(urls):
            LOG.warn("group(%s)'s task list is empty.'" % group)
            return
        LOG.info("pname(%s) pid(%s) receive and do task: %r."
                 % (self.pname, self.pid, task_data))

        map_obj = self.pool.map_async(self.pull_work, urls)
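        # get() blocks until every pull_work call has returned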
        ret_list = map_obj.get()
        LOG.info("pname(%s) pid(%s) finished group(%s) pull task."
                 %(self.pname, self.pid, group))
        self.finish_task(group, ret_list)

    def pull_work(self, task_url):
        pw = PullWorker(task_url)
        data = pw.run()
        return data

    def finish_task(self, group, ret_list):
        """
        1. compute metric
        2. send metric
        3. redis del group task for task state manage
        """
        task_data = self.redis.get_redis_cache(group)
        LOG.info("pname(%s) pid(%s) start push metric."
                %(self.pname, self.pid))
        pm = PushMetric(ret_list, task_data)
        pm.run()
        LOG.info("pname(%s) pid(%s) push metric finished."
                %(self.pname, self.pid))
        self.redis.delete(group)
        LOG.info("pname(%s) pid(%s) task state manage del "
                "task(%s) finished." %(self.pname, self.pid, group))
Example #48
def last_news(self, game_list):
     """
         Get the last news for every game in your owned games list.
         Sort by ascending date.
         Takes your owned games list as a parameter.
     """
     # Building appid list to feed Pool
     appid = [game['appid'] for game in game_list['response']['games']]
     # Keeping name and icon_url from owned game list
     metadata = []
     for game in game_list['response']['games']:
         icon_url = ("http://media.steampowered.com/steamcommunity/"
                     "public/images/apps/" + str(game['appid']) + "/" +
                     str(game['img_logo_url']) + ".jpg")
         metadata.append({'name' : game['name'],
                          'appid' : game['appid'],
                          'icon_url' : icon_url})
     # Multiprocessing
     news = []
     pool = ThreadPool(8)
     results = pool.map_async(self.get_news, appid, callback=news.extend)
     results.wait()
     pool.close()
     pool.join()
     # Building output
     data = []
     for x,y in zip(news, metadata):
         if x['appnews']['appid'] == y['appid']:
             name = y['name']
             icon_url = y['icon_url']
             content = x['appnews']['newsitems']
             for z in content:
                 news_date = datetime.datetime.fromtimestamp(int(z['date']))
                 if self.now - news_date < datetime.timedelta(weeks=4):
                     data.append({'title': z['title'],
                                  'date': news_date.strftime('%Y-%m-%d'),
                                  'name': name,
                                  'timestamp': z['date'],
                                  'content': z['contents'],
                                  'url': z['url'],
                                  'icon_url': icon_url})
     return sorted(data, key=itemgetter('timestamp'))
Example #49
def geoconv_bd(geolist):
    from multiprocessing.dummy import Pool as ThreadPool

    max_geo = 50

    pool = ThreadPool(5)

    grp = [geolist[i:i+max_geo] for i in xrange(0,len(geolist),max_geo)]
    result = pool.map_async(_conv, grp)

    pool.close()
    pool.join()

    ret = []
    for elem in result.get():
        ret.extend(elem)

    print '%d geo converted.' % (len(ret))

    return ret
Example #50
def fetch_and_extract(gsm_list):
    start = time.time()
    tmp_pathData = os.path.join(os.path.expanduser("~%s" % username), "AppData", "Local", "Temp", "GSM_extractData")

    # Max imap concurrent connections for Office365 : 20
    thread_pool = ThreadPool(max_connections, initialize_connections)
    zip_files = thread_pool.map_async(fetch_zip_file, gsm_list).get()

    # Close connections
    while not connection_queue.empty():
        cn = connection_queue.get()
        cn.close()
        cn.logout()

    # Extracting csv files from .zip
    print("Extracting files ...")
    process_pool = ProcessPool()
    process_pool.map(unzip, zip_files)

    print("Downloads and extraction successfully completed in {0}".format(timedelta(seconds=time.time() - start)))
Example #51
def parallel_read(file_names, thread_count):
    logger.debug('start active_threads: %s', threading.activeCount())
    params = [(index, file_name) for index, file_name in enumerate(file_names)]
    pool = ThreadPool(thread_count)
    logger.debug('create active_threads: %s', threading.activeCount())
    try:
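        # wait up to 10 seconds for all reads; a timeout or a worker
        # exception falls through to the handlers below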
        async_results = pool.map_async(
            file_handling.read_file_wrapper, params, chunksize=1)
        results = async_results.get(10)
    except multiprocessing.TimeoutError:
        logger.error('missing results')
        return []
    except:
        logger.exception('underlying thread error')
        return []
    finally:
        pool.close()
        pool.join()
        logger.debug('end active_threads: %s', threading.activeCount())
    return results
Example #52
    def _fetch_metadata(self):
        iprogress = cycle(ICHARS)

        def progress():
            sys.stderr.write(CLEAR + 'Fetching metadata %s' % next(iprogress))
            sys.stderr.flush()

        pool = Pool(POOL_SIZE)

        todo = [self.start_date - timedelta(days=i) for i in range(self.days)]

        metadata = pool.map_async(self.get_day_metadata, todo)
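        # poll ready() so the spinner keeps animating while the workers fetch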
        while not metadata.ready():
            progress()
            sleep(.1)

        sys.stderr.write(CLEAR + 'Fetching metadata [ * ]\n')
        sys.stderr.flush()

        return metadata.get()
Example #53
def start(self):
    global type_value, threads_value, cid, aid
    pool = ThreadPool(int(threads_value.get()))
    if (type_value.get() == 'Подписчиков'):  # 'Followers'
        print('followers start')
        aid = getaccountid(link_value.get())
        pool.map_async(follow_n, loginlist)
    if (type_value.get() == 'Лайков'):  # 'Likes'
        print('likes start')
        cid = getphotoid(link_value.get())
        pool.map_async(like_n, loginlist)
    if (type_value.get() == 'Комментариев'):  # 'Comments'
        print('comments start')
        cid = getphotoid(link_value.get())
        pool.map_async(comment_n, loginlist)
Example #54
    def start(self):
        self.blacklist.load(self.datadir)

        print 'Will fetch %d photo(s) per day for the last %d days' % \
            (self.per_day, self.days)


        to_fetch = self.get_photos()

        # XXX optim listdir
        to_fetch = [photo for photo in to_fetch
                    if not os.path.exists(photo.path)]

        print '%d photos to fetch' % len(to_fetch)

        def progress():
            count = len([_ for _ in to_fetch if _.done])
            dd = '%%%dd' % len(str(len(to_fetch)))
            template = 'Fetching %s/%s photos' % (dd, dd)
            sys.stderr.write('\r' + template % (count, len(to_fetch)))
            sys.stderr.flush()

        pool = Pool(POOL_SIZE)
        res = pool.map_async(self.store_photo, to_fetch)
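        # poll for completion, redrawing the progress line every 100 ms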

        while not res.ready():
            progress()
            sleep(.1)
        progress()
        print >> sys.stderr, ''

        self.blacklist.save()

        if not res.successful():
            res.get()

        self.remove_expired()
Example #55
urls = [  # leading list entries were lost in the original listing
    'https://wiki.python.org/moin/LocalUserGroups',
    'http://www.python.org/psf/',
    'http://docs.python.org/devguide/',
    'http://www.python.org/community/awards/'
    ]
# IO-bound work: use a thread pool (synchronous map)
from multiprocessing.dummy import Pool as ThreadPool 
pool = ThreadPool()
results = pool.map(urllib2.urlopen, urls)
pool.close() 
pool.join()

def test(url):
    res = urllib2.urlopen(url)
    return res.read()

# IO-bound work: use a thread pool (async map)
from multiprocessing.dummy import Pool as ThreadPool
import multiprocessing
pool = ThreadPool(multiprocessing.cpu_count())
results = pool.map_async(test, urls)
pool.close()
pool.join()

results.wait(timeout=10)  # wait() returns None, so there is nothing to print
print results.successful()
print results.ready()
for item in results.get(timeout=10):
    print item
    
Example #56
class TradeApi(object):
    """交易API"""
    HUOBI = 'huobi'
    HADAX = 'hadax'
    
    SYNC_MODE = 'sync'
    ASYNC_MODE = 'async'

    #----------------------------------------------------------------------
    def __init__(self):
        """Constructor"""
        self.accessKey = ''
        self.secretKey = ''
    
        self.mode = self.ASYNC_MODE
        self.active = False         # API running state
        self.reqid = 0              # request serial number
        self.queue = Queue()        # request queue
        self.pool = None            # worker thread pool
        
    #----------------------------------------------------------------------
    def init(self, host, accessKey, secretKey, mode=None):
        """初始化"""
        if host == self.HUOBI:
            self.hostname = HUOBI_API_HOST
        else:
            self.hostname = HADAX_API_HOST
        self.hosturl = 'https://%s' %self.hostname
            
        self.accessKey = accessKey
        self.secretKey = secretKey
        
        if mode:
            self.mode = mode
            
        self.proxies = {}
        
        return True
        
    #----------------------------------------------------------------------
    def start(self, n=10):
        """启动"""
        self.active = True
        
        if self.mode == self.ASYNC_MODE:
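            # each of the n workers loops in run() until self.active is cleared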
            self.pool = Pool(n)
            self.pool.map_async(self.run, range(n))
        
    #----------------------------------------------------------------------
    def close(self):
        """停止"""
        self.active = False
        self.pool.close()
        self.pool.join()
        
    #----------------------------------------------------------------------
    def httpGet(self, url, params):
        """HTTP GET"""        
        headers = copy(DEFAULT_GET_HEADERS)
        postdata = urllib.urlencode(params)
        
        try:
            response = requests.get(url, postdata, headers=headers, timeout=TIMEOUT)
            if response.status_code == 200:
                return True, response.json()
            else:
                return False, u'GET request failed, status code: %s' % response.status_code
        except Exception as e:
            return False, u'GET request raised an exception: %s' % e
    
    #----------------------------------------------------------------------    
    def httpPost(self, url, params, add_to_headers=None):
        """HTTP POST"""       
        headers = copy(DEFAULT_POST_HEADERS)
        postdata = json.dumps(params)
        
        try:
            response = requests.post(url, postdata, headers=headers, timeout=TIMEOUT)
            if response.status_code == 200:
                return True, response.json()
            else:
                return False, u'POST request failed, response: %s' % response.json()
        except Exception as e:
            return False, u'POST request raised an exception: %s' % e
        
    #----------------------------------------------------------------------
    def generateSignParams(self):
        """生成签名参数"""
        timestamp = datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S')
        d = {
            'AccessKeyId': self.accessKey,
            'SignatureMethod': 'HmacSHA256',
            'SignatureVersion': '2',
            'Timestamp': timestamp
        }    
        
        return d
        
    #----------------------------------------------------------------------
    def apiGet(self, path, params):
        """API GET"""
        method = 'GET'
        
        params.update(self.generateSignParams())
        params['Signature'] = createSign(params, method, self.hostname, path, self.secretKey)
        
        url = self.hosturl + path
        
        return self.httpGet(url, params)
    
    #----------------------------------------------------------------------
    def apiPost(self, path, params):
        """API POST"""
        method = 'POST'
        
        signParams = self.generateSignParams()
        signParams['Signature'] = createSign(signParams, method, self.hostname, path, self.secretKey)
        
        url = self.hosturl + path + '?' + urllib.urlencode(signParams)

        return self.httpPost(url, params)
    
    #----------------------------------------------------------------------
    def addReq(self, path, params, func, callback):
        """添加请求"""       
        # 异步模式
        if self.mode == self.ASYNC_MODE:
            self.reqid += 1
            req = (path, params, func, callback, self.reqid)
            self.queue.put(req)
            return self.reqid
        # sync mode
        else:
            return func(path, params)
    
    #----------------------------------------------------------------------
    def processReq(self, req):
        """处理请求"""
        path, params, func, callback, reqid = req
        result, data = func(path, params)
        
        if result:
            if data['status'] == 'ok':
                callback(data['data'], reqid)
            else:
                msg = u'error code: %s, error message: %s' % (data['err-code'], data['err-msg'])
                self.onError(msg, reqid)
        else:
            self.onError(data, reqid)
            
            # put the failed request back on the queue for the next pass
            self.queue.put(req)
    
    #----------------------------------------------------------------------
    def run(self, n):
        """连续运行"""
        while self.active:    
            try:
                req = self.queue.get(timeout=1)
                self.processReq(req)
            except Empty:
                pass
    
    #----------------------------------------------------------------------
    def getSymbols(self):
        """查询合约代码"""
        if self.hostname == HUOBI_API_HOST:
            path = '/v1/common/symbols'
        else:
            path = '/v1/hadax/common/symbols'

        params = {}
        func = self.apiGet
        callback = self.onGetSymbols
        
        return self.addReq(path, params, func, callback)
    
    #----------------------------------------------------------------------
    def getCurrencys(self):
        """查询支持货币"""
        if self.hostname == HUOBI_API_HOST:
            path = '/v1/common/currencys'
        else:
            path = '/v1/hadax/common/currencys'

        params = {}
        func = self.apiGet
        callback = self.onGetCurrencys
        
        return self.addReq(path, params, func, callback)   
    
    #----------------------------------------------------------------------
    def getTimestamp(self):
        """查询系统时间"""
        path = '/v1/common/timestamp'
        params = {}
        func = self.apiGet
        callback = self.onGetTimestamp
        
        return self.addReq(path, params, func, callback) 
    
    #----------------------------------------------------------------------
    def getAccounts(self):
        """查询账户"""
        path = '/v1/account/accounts'
        params = {}
        func = self.apiGet
        callback = self.onGetAccounts
    
        return self.addReq(path, params, func, callback)         
    
    #----------------------------------------------------------------------
    def getAccountBalance(self, accountid):
        """查询余额"""
        if self.hostname == HUOBI_API_HOST:
            path = '/v1/account/accounts/%s/balance' %accountid
        else:
            path = '/v1/hadax/account/accounts/%s/balance' %accountid
            
        params = {}
        func = self.apiGet
        callback = self.onGetAccountBalance
    
        return self.addReq(path, params, func, callback) 
    
    #----------------------------------------------------------------------
    def getOrders(self, symbol, states, types=None, startDate=None, 
                  endDate=None, from_=None, direct=None, size=None):
        """查询委托"""
        path = '/v1/order/orders'
        
        params = {
            'symbol': symbol,
            'states': states
        }
        
        if types:
            params['types'] = types
        if startDate:
            params['start-date'] = startDate
        if endDate:
            params['end-date'] = endDate        
        if from_:
            params['from'] = from_
        if direct:
            params['direct'] = direct
        if size:
            params['size'] = size        
    
        func = self.apiGet
        callback = self.onGetOrders
    
        return self.addReq(path, params, func, callback)     
    
    #----------------------------------------------------------------------
    def getMatchResults(self, symbol, types=None, startDate=None, 
                  endDate=None, from_=None, direct=None, size=None):
        """查询委托"""
        path = '/v1/order/matchresults'

        params = {
            'symbol': symbol
        }

        if types:
            params['types'] = types
        if startDate:
            params['start-date'] = startDate
        if endDate:
            params['end-date'] = endDate        
        if from_:
            params['from'] = from_
        if direct:
            params['direct'] = direct
        if size:
            params['size'] = size        

        func = self.apiGet
        callback = self.onGetMatchResults

        return self.addReq(path, params, func, callback)   
    
    #----------------------------------------------------------------------
    def getOrder(self, orderid):
        """查询某一委托"""
        path = '/v1/order/orders/%s' %orderid
    
        params = {}
    
        func = self.apiGet
        callback = self.onGetOrder
    
        return self.addReq(path, params, func, callback)             
    
    #----------------------------------------------------------------------
    def getMatchResult(self, orderid):
        """查询某一委托"""
        path = '/v1/order/orders/%s/matchresults' %orderid
    
        params = {}
    
        func = self.apiGet
        callback = self.onGetMatchResult
    
        return self.addReq(path, params, func, callback)     
    
    #----------------------------------------------------------------------
    def placeOrder(self, accountid, amount, symbol, type_, price=None, source=None):
        """下单"""
        if self.hostname == HUOBI_API_HOST:
            path = '/v1/order/orders/place'
        else:
            path = '/v1/hadax/order/orders/place'
        
        params = {
            'account-id': accountid,
            'amount': amount,
            'symbol': symbol,
            'type': type_
        }
        
        if price:
            params['price'] = price
        if source:
            params['source'] = source     

        func = self.apiPost
        callback = self.onPlaceOrder

        return self.addReq(path, params, func, callback)           
    
    #----------------------------------------------------------------------
    def cancelOrder(self, orderid):
        """撤单"""
        path = '/v1/order/orders/%s/submitcancel' %orderid
        
        params = {}
        
        func = self.apiPost
        callback = self.onCancelOrder

        return self.addReq(path, params, func, callback)          
    
    #----------------------------------------------------------------------
    def batchCancel(self, orderids):
        """批量撤单"""
        path = '/v1/order/orders/batchcancel'
    
        params = {
            'order-ids': orderids
        }
    
        func = self.apiPost
        callback = self.onBatchCancel
    
        return self.addReq(path, params, func, callback)     
        
    #----------------------------------------------------------------------
    def onError(self, msg, reqid):
        """错误回调"""
        print msg, reqid
        
    #----------------------------------------------------------------------
    def onGetSymbols(self, data, reqid):
        """查询代码回调"""
        #print reqid, data 
        for d in data:
            print d
    
    #----------------------------------------------------------------------
    def onGetCurrencys(self, data, reqid):
        """查询货币回调"""
        print reqid, data        
    
    #----------------------------------------------------------------------
    def onGetTimestamp(self, data, reqid):
        """查询时间回调"""
        print reqid, data    
        
    #----------------------------------------------------------------------
    def onGetAccounts(self, data, reqid):
        """查询账户回调"""
        print reqid, data     
    
    #----------------------------------------------------------------------
    def onGetAccountBalance(self, data, reqid):
        """查询余额回调"""
        print reqid, data
        for d in data['data']['list']:
            print d
        
    #----------------------------------------------------------------------
    def onGetOrders(self, data, reqid):
        """查询委托回调"""
        print reqid, data    
        
    #----------------------------------------------------------------------
    def onGetMatchResults(self, data, reqid):
        """查询成交回调"""
        print reqid, data      
        
    #----------------------------------------------------------------------
    def onGetOrder(self, data, reqid):
        """查询单一委托回调"""
        print reqid, data    
        
    #----------------------------------------------------------------------
    def onGetMatchResult(self, data, reqid):
        """查询单一成交回调"""
        print reqid, data    
        
    #----------------------------------------------------------------------
    def onPlaceOrder(self, data, reqid):
        """委托回调"""
        print reqid, data
    
    #----------------------------------------------------------------------
    def onCancelOrder(self, data, reqid):
        """撤单回调"""
        print reqid, data          
        
    #----------------------------------------------------------------------
    def onBatchCancel(self, data, reqid):
        """批量撤单回调"""
        print reqid, data      
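
A hedged usage sketch of the request/callback flow above. The class name and the init/start pair are assumptions (they are not part of this excerpt); the call pattern itself — fire an asynchronous request, receive the result in the matching on* callback — follows from the methods shown.

# Minimal usage sketch. Assumptions: the class is named HuobiRestApi and
# exposes an init()/start() pair like the LbankRestApi below; neither is
# shown in this excerpt.
if __name__ == '__main__':
    api = HuobiRestApi()                    # hypothetical class name
    api.init(accessKey='...', secretKey='...')
    api.start()

    api.getAccounts()                       # result arrives in onGetAccounts
    reqid = api.placeOrder(accountid='123456', amount='0.001',
                           symbol='btcusdt', type_='buy-limit', price='7000')
    print('placeOrder request id:', reqid)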
Beispiel #57
0
# Imports needed by this snippet (elided in the original excerpt); REST_HOST
# is assumed to be a module-level constant defined elsewhere.
import hashlib
import urllib.parse
from queue import Queue, Empty
from multiprocessing.dummy import Pool

import requests


class LbankRestApi(object):
    """LBank REST API wrapper: worker threads drain a shared request queue."""
    
    #----------------------------------------------------------------------
    def __init__(self):
        """Constructor"""
        self.apiKey = ''
        self.secretKey = ''

        self.active = False         # API running flag
        self.reqID = 0              # request sequence number
        self.queue = Queue()        # request queue
        self.pool = None            # worker thread pool
        self.sessionDict = {}       # one requests.Session per worker thread
        
    #----------------------------------------------------------------------
    def init(self, apiKey, secretKey):
        """初始化"""
        self.apiKey = apiKey
        self.secretKey = secretKey
        
    #----------------------------------------------------------------------
    def start(self, n=10):
        """"""
        if self.active:
            return
        
        self.active = True
        self.pool = Pool(n)
        self.pool.map_async(self.run, range(n))
        
    #----------------------------------------------------------------------
    def close(self):
        """退出"""
        self.active = False
        
        if self.pool:
            self.pool.close()
            self.pool.join()
    
    #----------------------------------------------------------------------
    def processReq(self, req, i):
        """处理请求"""
        # 读取方法和参数
        method, path, params, callback, reqID = req
        url = REST_HOST + path
        
        # 在参数中增加必须的字段
        params['api_key'] = self.apiKey
        params['sign'] = self.generateSignature(params)
        
        # 发送请求
        payload = urllib.urlencode(params)
        
        try:
            # 使用会话重用技术,请求延时降低80%
            session = self.sessionDict[i]
            resp = session.request(method, url, params=payload)
            #resp = requests.request(method, url, params=payload)
            
            code = resp.status_code
            d = resp.json()

            if code == 200:
                callback(d, reqID)
            else:
                self.onError(code, str(d))    

        except Exception as e:
            self.onError(type(e), e.message)   
    
    #----------------------------------------------------------------------
    def run(self, i):
        """连续运行"""
        self.sessionDict[i] = requests.Session()
        
        while self.active:
            try:
                req = self.queue.get(block=True, timeout=1)  # 获取请求的阻塞为一秒
                self.processReq(req, i)
            except Empty:
                pass    
            
    #----------------------------------------------------------------------
    def addReq(self, method, path, params, callback):
        """发送请求"""
        # 请求编号加1
        self.reqID += 1
        
        # 生成请求字典并放入队列中
        req = (method, path, params, callback, self.reqID)
        self.queue.put(req)
        
        # 返回请求编号
        return self.reqID

    #----------------------------------------------------------------------
    def generateSignature(self, params):
        """生成签名"""
        params = sorted(params.iteritems(), key=lambda d:d[0], reverse=False)
        params.append(('secret_key', self.secretKey))
        message = urllib.urlencode(params)
        
        m = hashlib.md5()
        m.update(message)
        m.digest()
    
        sig = m.hexdigest()
        return sig    

    #----------------------------------------------------------------------
    def onError(self, code, msg):
        """错误推送"""
        print(code, msg)

    #----------------------------------------------------------------------
    def onData(self, data, reqID):
        """"""
        print(data, reqID)
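
For reference, a standalone sketch of the signing scheme generateSignature implements: sort the parameters by key, append secret_key, URL-encode the pairs, and take the MD5 hex digest. The parameter values below are made up for illustration.

# Standalone demo of the signature computed by generateSignature above
# (illustrative values only).
import hashlib
from urllib.parse import urlencode

def sign_demo(params, secret_key):
    items = sorted(params.items())              # sort by parameter name
    items.append(('secret_key', secret_key))    # secret key goes last
    return hashlib.md5(urlencode(items).encode('utf-8')).hexdigest()

print(sign_demo({'api_key': 'demo-key', 'symbol': 'eth_btc'}, 'demo-secret'))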
Beispiel #58
0
    multiProcessCount = 2
    hostList = []
    hostOsTimeList = []
    NewOSFilterInterval = 1  # days
    # add the hosts to be checked to hostList
    hostList.append('10.1.4.100')
    hostList.append('10.1.4.23')
#    print(hostList)

    cli = "stat /lost+found/ | grep Modify | awk -F ' ' {'print $2,$3,$4'};"
    cli += "exit $?" ## auto logout
    username='******'
    password='******'
    
    pool = Pool(processes=multiProcessCount)
    res = pool.map_async(pssh, ((host, username, password, cli) for host in hostList))
    result = res.get()

    import time
    import datetime
    for output in result:
        if output[1] and output[1] != '' :
            timeArr = output[1].split('\n')[1].split(' ')
            realTimeStruct = time.strptime(timeArr[0] + ' ' + timeArr[1].split('.')[0], '%Y-%m-%d %H:%M:%S')
            realTime = datetime.datetime(*realTimeStruct[:6])
            osInstallTime_UTC = None
            utcDelta = int(timeArr[2][1:])
            if '+' in timeArr[2]:
                osInstallTime_UTC = realTime + datetime.timedelta(hours=-1 * (utcDelta // 100))
            elif '-' in timeArr[2]:
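
For reference, a self-contained sketch of the timezone normalization the fragment above implements — parse the ±HHMM offset from the stat output and shift the local mtime to UTC. The helper name is hypothetical; unlike the code above, which keeps only whole hours (utcDelta // 100), this version also honors the minutes part.

# Hypothetical helper mirroring the offset handling above: '+0800' means
# local time is 8 hours ahead of UTC, so subtract the offset to get UTC.
import datetime

def to_utc(local_time, offset):                 # offset like '+0800' or '-0530'
    delta = datetime.timedelta(hours=int(offset[1:3]), minutes=int(offset[3:5]))
    return local_time - delta if offset[0] == '+' else local_time + delta

print(to_utc(datetime.datetime(2019, 10, 15, 12, 0, 0), '+0800'))  # 04:00 UTC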
Beispiel #59
0
def query_Id_AuId(id1, auId2, json1, json2):
	# assumed imports (not shown in this snippet): json, urllib.request,
	# and Pool from multiprocessing.dummy
	#sys.stderr.write('query_AuId_Id ' + str(id1) + ' ' + str(auId2) + '\n')
	print('query_Id_AuId', id1, auId2)
	ans = []
	
	#now = time.time()
	#json1 = getPaperJson(id1, 'RId,F.FId,J.JId,C.CId,AA.AuId,AA.AfId')
	#print 'time use: ', time.time() - now	
	#now = time.time()
	#url = 'https://oxfordhk.azure-api.net/academic/v1.0/evaluate?expr=Composite(AA.AuId=%d)&count=20000&attributes=Id,F.FId,J.JId,C.CId,AA.AuId&orderby=D:asc&subscription-key=f7cc29509a8443c5b3a5e56b0e38b5a6'%auId2
	#json2 = json.loads((urllib.urlopen(url)).read())['entities']

	# Prepare for Id-AA.AuId-AA.AfId-AuId
	AFIdSet2 = set()
	for paper in json2:
		if 'AA' in paper:
			for author in paper['AA']:
				if author['AuId'] == auId2 and 'AfId' in author:
					AFIdSet2.add(author['AfId'])
	if len(AFIdSet2) > 0:
		authorPaperListResult = []
		pool = Pool(20)
		if 'AA' in json1:
			for author in json1['AA']:
				url = 'https://oxfordhk.azure-api.net/academic/v1.0/evaluate?expr=Composite(AA.AuId=%d)&count=3000&attributes=AA.AuId,AA.AfId&orderby=D:desc&subscription-key=f7cc29509a8443c5b3a5e56b0e38b5a6' % author['AuId']
				authorPaperListResult.append(pool.apply_async(lambda url: json.loads(urllib.request.urlopen(url).read())['entities'], (url,)))


	paperIdList = sorted(paper['Id'] for paper in json2)

	# =========== 1-hop ===========

	# Id-AuId
	if 'AA' in json1 and auId2 in (author['AuId'] for author in json1['AA']):
		answer(ans, [id1, auId2])

	# =========== 2-hop ===========

	# Id-Id-AuId
	if 'RId' in json1:
		RIdList = sorted(json1['RId'])
		for RId in join(paperIdList, RIdList):
			answer(ans, [id1, RId, auId2])

	# =========== 3-hop ===========

	# Id-F.FId-Id-AuId
	if 'F' in json1:
		FIdList1 = sorted(x['FId'] for x in json1['F'])
		for paper in json2:
			if 'F' in paper:
				FIdListTmp = sorted(x['FId'] for x in paper['F'])
				for FId in join(FIdListTmp, FIdList1):
					answer(ans, [id1, FId, paper['Id'], auId2])

	# Id-C.CId-Id-AuId
	if 'C' in json1:
		CId1 = json1['C']['CId']
		for paper in json2:
			if 'C' in paper and paper['C']['CId'] == CId1:
				answer(ans, [id1, CId1, paper['Id'], auId2])

	# Id-J.JId-Id-AuId
	if 'J' in json1:
		JId1 = json1['J']['JId']
		for paper in json2:
			if 'J' in paper and paper['J']['JId'] == JId1:
				answer(ans, [id1, JId1, paper['Id'], auId2])

	# Id-AA.AuId-Id-AuId
	if 'AA' in json1:
		AuIdList1 = sorted(x['AuId'] for x in json1['AA'])
		for paper in json2:
			if 'AA' in paper:
				AuIdListTmp = sorted(x['AuId'] for x in paper['AA'])
				for AuId in join(AuIdListTmp, AuIdList1):
					answer(ans, [id1, AuId, paper['Id'], auId2])

	# Id-Id-Id-AuId
	if 'RId' in json1:
		pool = Pool(20)
		citePaperInfoResults = pool.map_async(lambda x: getPaperJson(x, 'RId'), RIdList)
		pool.close()
		#pool.join()
		citePaperInfos = citePaperInfoResults.get()
		# map_async preserves input order, so pair each result with the RId
		# it was fetched for (the original code reused a stale RId variable
		# left over from the 2-hop loop above)
		for RId, citePaperInfo in zip(RIdList, citePaperInfos):
			if 'RId' in citePaperInfo:
				RIdListTmp = sorted(citePaperInfo['RId'])
				for comRId in join(RIdListTmp, paperIdList):
					answer(ans, [id1, RId, comRId, auId2])

	# Id-AA.AuId-AA.AfId-AuId
	if len(AFIdSet2) > 0:
		uniqueSet = set()
		idx = 0
		if 'AA' in json1:
			for author in json1['AA']:
				auId1 = author['AuId']
				authorPaperList = authorPaperListResult[idx].get()
				idx += 1
				for paper in authorPaperList:
					if 'AA' in paper:
						for author2 in paper['AA']:
							if author2['AuId'] == auId1 and 'AfId' in author2 and author2['AfId'] in AFIdSet2 and (auId1, author2['AfId']) not in uniqueSet:
								answer(ans, [id1, auId1, author2['AfId'], auId2])
								uniqueSet.add((auId1, author2['AfId']))

	#AFIdSet2 = set()
	#for paper in json2:
	#	if paper.has_key('AA'):
	#		for author in paper['AA']:
	#			if author['AuId'] == auId2 and author.has_key('AfId'):
	#				AFIdSet2.add(author['AfId'])
	#if json1.has_key('AA'):
	#	for author in json1['AA']:
	#		if author.has_key('AfId'):
	#			if author['AfId'] in AFIdSet2:
	#				answer(ans, [id1, author['AuId'], author['AfId'], auId2])

	#print 'time use2: ', time.time() - now
	return ans
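
query_Id_AuId relies on two helpers that are not part of this excerpt: answer(ans, path), which records a discovered path, and join(a, b), which by its usage must intersect two sorted ID lists. A plausible merge-based sketch of join, assuming both inputs are sorted (as every caller above ensures):

# Plausible sketch of the join() helper used above: a linear-time merge
# intersection of two sorted lists.
def join(a, b):
	result = []
	i = j = 0
	while i < len(a) and j < len(b):
		if a[i] == b[j]:
			result.append(a[i])
			i += 1
			j += 1
		elif a[i] < b[j]:
			i += 1
		else:
			j += 1
	return result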