Example #1
def main(args):
    logdirs = tf.gfile.Glob(args.logdirs)
    print(len(logdirs), 'logdirs.')
    assert logdirs
    tf.gfile.MakeDirs(args.outdir)
    # Judging by Example #3's imports, `multiprocessing` here is
    # multiprocessing.dummy (threads), so the Lock can be shipped to the
    # workers via functools.partial without pickling issues.
    pool = multiprocessing.Pool(args.workers)
    lock = multiprocessing.Lock()
    pool.map(functools.partial(process_logdir, lock=lock, args=args), logdirs)
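
process_logdir itself is not included in this snippet. A minimal sketch of its likely shape, assuming (as in Example #3's safe_print) that the shared lock only serializes console output:

def process_logdir(logdir, lock, args):
    # Hypothetical worker body; create_reader and traceback come from Example #3.
    try:
        reader = create_reader(logdir)
        with lock:  # keep per-logdir status lines from interleaving
            print('Processed', logdir)
    except Exception:
        with lock:
            traceback.print_exc()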
Example #2
def test_api(client_key):

    # 1. Create test image data and both processing and result queues
    urls = ['https://demo.restb.ai/images/demo/demo-1.jpg',
            'https://demo.restb.ai/images/demo/demo-2.jpg',
            'https://demo.restb.ai/images/demo/demo-3.jpg',
            'https://demo.restb.ai/images/demo/demo-4.jpg',
            'https://demo.restb.ai/images/demo/demo-5.jpg',
            'https://demo.restb.ai/images/demo/demo-6.jpg']
    queue = mp.Queue()
    image_id = 1
    for url in urls:
        for model in __MODELS.keys():
            queue.put(dict(id=image_id, url=url, model=model))
        image_id += 1
    results = mp.Queue()

    # 2. Pick which API endpoint to use (US vs. EU)
    url = __URL_US

    # 3. Define concurrency-specific objects
    # stats objects ('d' doubles rather than 'f': millisecond epoch timestamps
    # lose precision in single-precision floats)
    lock_stats = mp.Lock()
    counter = mp.Value('i', 0)
    avg_req_time = mp.Value('d', 0)  # accumulated request time in ms; averaged at print time
    time_start = mp.Value('d', 999999999999999)
    time_end = mp.Value('d', 0)

    # 4. Spawn processes/threads to process the images in the queue
    pool = []
    for i in range(__requests_per_second):
        # pass in necessary parameters to thread, including client key, etc.
        p = mp.Process(target=image_process_thread,
                       args=(url, client_key, queue, results,
                             lock_stats, counter, avg_req_time, time_start, time_end))
        pool.append(p)
        p.start()

    # 5. Clean up after the queue has been processed, using "poison pills"
    while not queue.empty():
        # wait for the queue to drain (mp.Queue.empty() is only approximate,
        # but good enough before seeding the shutdown pills below)
        time.sleep(1)
    for i in pool:
        # seed shutdown messages / poison pills
        queue.put(dict(id=-1, url='shutdown', model='shutdown'))
    for p in pool:
        # enforce clean shutdown of threads
        p.join()

    # 6. Finally, report and return accumulated results
    total = time_end.value - time_start.value
    print('[{requests}] requests processed in [{seconds}] seconds with average time [{time}] ms, total throughput: [{throughput}] rps'.format(
        requests=counter.value,
        seconds=str(round(total / 1000.0, 1)),
        time=str(round(avg_req_time.value / counter.value, 0)),
        throughput=str(round(counter.value / (total / 1000.0), 2))
    ))
    return results
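
image_process_thread is not shown in this snippet. A hedged sketch of a worker that matches the arguments and the poison-pill protocol above; the HTTP call (the requests library and its parameter names) is an assumption, not the actual restb.ai request format:

import time
import requests  # assumed dependency

def image_process_thread(url, client_key, queue, results,
                         lock_stats, counter, avg_req_time, time_start, time_end):
    while True:
        item = queue.get()
        if item['id'] == -1:  # poison pill seeded by test_api()
            break
        t0 = time.time() * 1000.0
        resp = requests.get(url, params={'client_key': client_key,    # parameter
                                         'image_url': item['url'],    # names are
                                         'model_id': item['model']})  # assumptions
        t1 = time.time() * 1000.0
        results.put((item['id'], item['model'], resp.status_code))
        with lock_stats:  # all shared stats are updated under a single lock
            counter.value += 1
            avg_req_time.value += t1 - t0  # summed here, averaged at print time
            time_start.value = min(time_start.value, t0)
            time_end.value = max(time_end.value, t1)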
Example #3
import fnmatch
import functools
import multiprocessing.dummy as multiprocessing
import os
import re
import sys
import traceback

# import imageio
import numpy as np
import skimage.io
import tensorflow as tf
from tensorboard.backend.event_processing import (plugin_event_multiplexer as
                                                  event_multiplexer)

lock = multiprocessing.Lock()


def safe_print(*args, **kwargs):
    with lock:
        print(*args, **kwargs)


def create_reader(logdir):
    reader = event_multiplexer.EventMultiplexer()
    reader.AddRun(logdir, 'run')
    reader.Reload()
    return reader


def extract_values(reader, tag):
    # (Body truncated in the source; a minimal reconstruction using the
    # EventMultiplexer loaded above and TF1's tf.make_ndarray.)
    events = reader.Tensors('run', tag)
    steps = [event.step for event in events]
    times = [event.wall_time for event in events]
    values = [tf.make_ndarray(event.tensor_proto) for event in events]
    return steps, times, values
Example #4
TYC_HOST = 'https://www.tianyancha.com'

SOGOU = [
    '6b-f2--5-----ec---98-------034d1a-7--',
    '18-------b9fd7-c--203-------46-a-5e--',
    '-6-0--3----d-ea---f43b-7-c-8-219----5',
    '-70d--ac----f6-e-4b5--9-----3----21-8',
    '--3-1--45-c-7--2-a-e--b-8-6--0d----f9',
    '----3d658----b4------a01c9-27-e---f--',
    '8---d-7--4-f---2e9--5-1---63--b--ca-0',
    'd49---5---c-6b----a-3------207--8f1-e',
    '7---65-------0-----f--9--12-4e8cbad-3',
    '1-8--fl52--9----e-d4-b-a-c--3--0--7-6']

# `mt` is not imported in this snippet; the Event/Lock/Value trio matches the
# multiprocessing API (threading has no Value).
BREAK_EVENT = mt.Event()
_LOCK = mt.Lock()
_DONE = mt.Value('i', 0)
_FAIL = mt.Value('i', 0)
_EMPTY = mt.Value('i', 0)

logger = src.util.loginit.get_logger('tyc2')


def get_login():
    url = 'https://www.tianyancha.com/cd/login.json'

    login_json = {'mobile': '13606181270',
                  'cdpassword': '******',
                  'loginway': 'PL',
                  'autoLogin': True}
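
get_login is cut off at this point. A hedged sketch of the likely next step; the requests dependency and the response handling are assumptions:

    # Hypothetical continuation: POST the credentials and return the parsed reply.
    resp = requests.post(url, json=login_json)
    return resp.json()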
    """
class Runner(object):
    """
        Summary:
            Execution module.
        Attributes:
            __cases: list of cases; each case is the tuple of parameters that builds a WebApiProcessor
            __tasks: task queue; tasks awaiting execution are put into it
            __results: result queue; collects results from the worker threads

    """
    _lock = multithreading.Lock()  # `multithreading` is unimported here; the API used matches multiprocessing

    RUNNING = False  # flag: whether a run is currently in progress

    def __init__(self, cases=None):
        """

        :param cases: case 列表
        """
        super(Runner, self).__init__()
        self.__cases = cases
        self.__tasks = multithreading.JoinableQueue()
        self.__results = multithreading.Queue()

    def init_cases(self, cases):
        """
        初始化case列表
        :param cases:
        :return:
        """
        self.__cases = cases

    def producing(self):
        """
            Summary:
                Task producer: turns the cases into queued tasks.
        :return:
        """

        if self.__cases:
            case_processors = map(interface_process.WebApiProcessor, self.__cases)
            # NOTE: relies on Python 2's eager map(); under Python 3 this must
            # be wrapped in list() or nothing is ever enqueued.
            map(self.__tasks.put, case_processors)
            return self.__tasks
        log.logger.error("case list is empty")
        return None

    def worker(self):
        """
            Summary:
                Consumer: processes tasks from the queue.
        :return:
        """

        while True:
            web_processor = self.__tasks.get()
            case_id = web_processor.get_case_id()
            case_name = web_processor.get_case_name()
            url = web_processor.get_case_url()

            log.logger.info("about to process case: {}, URL: {}".format(case_name, url))
            web_processor.make_request()
            log.logger.info("finished processing case: {}, URL: {}".format(case_name, url))

            status = True
            # web_processor.assert_all_json_value()
            tests_result = web_processor.get_javascript_tests_result()

            if tests_result is None:  # tests_result is empty
                with self._lock:
                    html_plugin.HTML_report_manager.add_error(case_name, error=u'response data is not in JSON format')
                log.logger.info("case: {} written to report".format(case_name))
                self.__results.put((case_id, case_name, "", False))

            else:
                test_count = len(tests_result)

                if test_count == 0:
                    with self._lock:
                        html_plugin.HTML_report_manager.add_error(case_name, error=u'no test fields specified')
                    log.logger.info("case: {} written to report".format(case_name))
                    self.__results.put((case_id, case_name, "", False))

                for result_key in tests_result:

                    result_key_uni = result_key.decode('utf8')
                    try:
                        assert tests_result[result_key]
                        status = True
                        with self._lock:
                            # report writes must be synchronized across threads
                            html_plugin.HTML_report_manager.add_success(case_name, result_key_uni, test_count)

                    except AssertionError:  # verification failed
                        status = False
                        with self._lock:
                            html_plugin.HTML_report_manager.add_failure(case_name, result_key_uni, test_count)
                    finally:
                        log.logger.info("case:{}完成报告写入".format(case_name))
                        self.__results.put((case_id, case_name, result_key, status))

            self.__tasks.task_done()  # mark one task done for each task the pool completes
        return

    def create_consumers(self, consumer_count=0):
        """
            Summary:
                Create the consumers.
        :return:
        """
        consumer_count = consumer_count or multithreading.cpu_count()
        consumer_list = list()
        for _ in range(consumer_count):
            curr_consumer = multithreading.Process(
                target=self.worker, args=())
            curr_consumer.daemon = True  # daemonize: children exit when the main process exits
            consumer_list.append(curr_consumer)
            curr_consumer.start()
        return consumer_list

    def get_results(self):
        """
            Summary:
                Finalize the report and return the result queue.
        :return:
        """
        html_plugin.HTML_report_manager.finalize()  # finish the report: write the tail and create the report (with timestamp)
        return self.__results

    def run(self, thread_num=3):
        """

        用例执行,在当前进程内同步执行

        :param thread_num: 并发的线程数
        :return: report_create_time: 测试报告创建时间
        """
        self.RUNNING = True

        self.create_consumers(thread_num)
        tasks = self.producing()

        tasks.join()
        results = self.get_results()
        report_create_time = html_plugin.HTML_report_manager.get_report_create_time()

        while not results.empty():
            result = results.get_nowait()
            case_id, case_name, case_attr, status = result
            log.logger.info("第{}个case:\"{}\"的结果属性\"{}\"验证{}".format(
                case_id, case_name, case_attr, '成功' if status else '失败'))

        self.RUNNING = False
        return report_create_time

    def run_async(self, thread_num=3):
        """
        用例执行, 开启新的线程异步执行

        """

        new_thread = multithreading.Process(target=self.run, args=(thread_num,))
        new_thread.start()
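
A hedged usage sketch for the class above; load_cases() is a hypothetical stand-in for however the case tuples are produced:

runner = Runner(cases=load_cases())     # load_cases() is hypothetical
report_time = runner.run(thread_num=4)  # synchronous: blocks until the queue drains
runner.run_async(thread_num=4)          # or: execute without blocking the caller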
Example #6
def downloadBooks(url):
    # (The opening of this function is truncated in the source; `servers` below
    # comes from setup code that is not shown.)
    bookUrls = getBookUrls(url)
    safePrint(u'found %d books !\n' % len(bookUrls))
    for bookName, bookUrl in filterBooksByUser(bookUrls):
        downloadBook(os.path.join(u'Books', bookName), bookUrl, servers)


def filterBooksByUser(bookUrls):
    for name, url in bookUrls:
        safePrint(name + u'\n')

    while True:
        keyword = raw_input(u'input keyword(regex) to filter books:\n').decode(
            sys.getfilesystemencoding())
        names = [name for name, url in bookUrls if re.search(keyword, name)]
        for name in names:
            safePrint(name + u'\n')
        if raw_input(u'is the book list ok?(y/n):\n') == 'y': break

    book2Url = dict(bookUrls)
    return [(name, book2Url[name]) for name in names]


if __name__ != '__main__': exit(0)
if len(sys.argv) < 2:
    print('Usage : python %s url' % sys.argv[0])
    exit(0)

g_gLock = dummy.Lock()
g_pool = dummy.Pool(16)
downloadBooks(sys.argv[1])
Example #7
# vim:fileencoding=utf-8

from __future__ import unicode_literals
import urllib2, urllib
import multiprocessing.dummy as dummy
import os, sys

g_netHost = 'localhost:8080'
g_netDirPath = ''
g_localDirPath = ''

g_lock = dummy.Lock()
g_downloadedCount = 0
g_filePathList = []


def saveContent2File(filePath, ifile):
    filePath = g_localDirPath + filePath[len(g_netDirPath):]
    dirs, fname = os.path.split(filePath)
    if not os.path.isdir(dirs):
        os.makedirs(dirs)

    BUF_SIZE = 2**20
    with open(filePath, 'wb') as ofile:  # open() rather than the Py2-only file()
        while True:
            data = ifile.read(BUF_SIZE)
            if not data: break
            ofile.write(data)


def downloadFile(filePath):
    # (Body truncated in the source; a minimal sketch consistent with the globals
    # above: fetch the file from g_netHost and record progress under g_lock.)
    global g_downloadedCount
    ifile = urllib2.urlopen('http://%s%s' % (g_netHost, urllib.quote(filePath.encode('utf-8'))))
    saveContent2File(filePath, ifile)
    with g_lock:
        g_downloadedCount += 1
        print('downloaded %d: %s' % (g_downloadedCount, filePath))
Example #8
import queue  # needed for queue.PriorityQueue/queue.Queue below; `mp` is presumably multiprocessing

class RateLimitPriorityQueue(queue.PriorityQueue):  # class line reconstructed from the docstring
    def __init__(self, maxsize=0, calls=1, per=1.0, fuzz=0):
        """
        A thread safe priority queue with a given maximum size and rate
        limit.

        Prioritized items should be tuples of form (priority, item), with
        priority lowest first. Priority determines the order of items
        returned by get().

        If `maxsize` is <= 0, the queue size is infinite (see
        `queue.PriorityQueue`).

        The rate limit is described as `calls` `per` time window, with
        `per` measured in seconds. The default rate limit is 1 call per
        second. If `per` is <= 0, the rate limit is infinite.

        To avoid immediately filling the whole queue at startup, an
        extra randomized wait period can be configured with `fuzz`.
        This will cause the queue to wait between 0 and `fuzz`
        seconds before putting the object in the queue. Fuzzing only
        occurs if there is no rate limit waiting to be done.

        Parameters
        ----------
        maxsize : int, optional, default 0
            The number of slots in the queue, <=0 for infinite.

        calls : int, optional, default 1
            The number of calls per time window `per`. Must be at least 1.

        per : float, optional, default 1.0
            The time window for tracking calls, in seconds, <=0 for
            infinite rate limit.

        fuzz : float, optional, default 0
            The maximum length (in seconds) of fuzzed extra sleep, <=0
            for no fuzzing.

        Examples
        --------

        Basic usage:

            >>> rlq = RateLimitPriorityQueue()
            >>> rlq.put((2, 'second'))
            >>> rlq.put((1, 'first'))
            >>> rlq.get()
            (1, 'first')
            >>> rlq.get()
            (2, 'second')

        A rate limit of 3 calls per 5 seconds:

            >>> rlq = RateLimitPriorityQueue(calls=3, per=5)

        A queue with the default 1 call per second, with a maximum size
        of 3:

            >>> rlq = RateLimitPriorityQueue(3)

        A queue of infinite size and rate limit, equivalent to
        queue.Queue():

            >>> rlq = RateLimitPriorityQueue(per=0)

        A queue with wait time fuzzing up to 1 second so that the queue
        cannot be filled immediately directly after instantiation:

            >>> rlq = RateLimitPriorityQueue(fuzz=1)

        """
        if calls < 1:
            raise ValueError("`calls` must be an integer >= 1")

        super().__init__(maxsize)
        self.calls = int(calls)
        self.per = float(per)
        self.fuzz = float(fuzz)

        self._call_log = queue.Queue(maxsize=self.calls)
        self._pending_get = mp.Lock()
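
put() itself is not part of the snippet. A minimal self-contained sketch of how _call_log could enforce the documented rate limit; the body is an assumption, not the package's actual implementation (the __init__ here is a condensed restatement of the one above):

import queue
import random
import time

class RateLimitPriorityQueue(queue.PriorityQueue):
    def __init__(self, maxsize=0, calls=1, per=1.0, fuzz=0):
        super().__init__(maxsize)
        self.calls, self.per, self.fuzz = int(calls), float(per), float(fuzz)
        self._call_log = queue.Queue(maxsize=self.calls)

    def put(self, item, block=True, timeout=None):
        # Sliding window: _call_log holds the timestamps of the last `calls` puts.
        waited = False
        if self.per > 0 and self._call_log.full():
            oldest = self._call_log.get()             # the put made `calls` puts ago
            wait = self.per - (time.monotonic() - oldest)
            if wait > 0:
                time.sleep(wait)                      # enforce `calls` per `per` seconds
                waited = True
        if self.fuzz > 0 and not waited:
            time.sleep(random.uniform(0, self.fuzz))  # fuzz only when no rate wait happened
        super().put(item, block=block, timeout=timeout)
        if self.per > 0:
            self._call_log.put(time.monotonic())      # record this call's timestamp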
Example #9
    def __init__(self, process: Process, underlying=dummy.Queue):
        self.process = process
        self._underlying = underlying()
        self._head = 1
        self.lock = dummy.Lock()
Example #10
    def __init__(self, process: Process):
        self.value = 1
        self.process = process
        self.lock = dummy.Lock()
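
Both fragments pair mutable state with a dummy (i.e. threading) Lock. A minimal sketch of the guarded-update pattern they imply; the method name increment is an assumption:

    def increment(self):
        # dummy.Lock() is a threading.Lock, so this serializes updates across
        # the threads that share this object.
        with self.lock:
            self.value += 1
            return self.value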