Example #1
class Core(object):

    def __init__(self, bot):
        self.bot = bot
        self.timeout = int(self.bot.config.get('timeout'))
        self.ping_queue = Queue(loop=bot.loop)

    def connection_made(self):
        self.bot.loop.call_later(self.timeout, self.check_ping)
        self.ping_queue.put_nowait(self.bot.loop.time())

    def check_ping(self):  # pragma: no cover
        # check if we received a ping
        # reconnect if queue is empty
        self.bot.log.debug(
            'Ping queue size: {}'.format(self.ping_queue.qsize()))
        if self.ping_queue.empty():
            self.bot.loop.call_soon(self.bot.protocol.transport.close)
        else:
            self.bot.loop.call_later(self.timeout, self.check_ping)
        while not self.ping_queue.empty():
            self.ping_queue.get_nowait()

    @event(rfc.PING)
    def pong(self, data):
        """PING reply"""
        self.ping_queue.put_nowait(self.bot.loop.time())
        self.bot.send('PONG ' + data)

    @event(rfc.NEW_NICK)
    def recompile(self, nick=None, new_nick=None, **kw):
        """recompile regexp on new nick"""
        if self.bot.nick == nick.nick:
            self.bot.config['nick'] = new_nick
            self.bot.recompile()

    @event(rfc.ERR_NICK)
    def badnick(self, me=None, nick=None, **kw):
        """Use alt nick on nick error"""
        if me == '*':
            self.bot.set_nick(self.bot.nick + '_')
        self.bot.log.debug('Trying to regain nickname in 30s...')
        self.bot.loop.call_later(30, self.bot.set_nick, self.bot.original_nick)

    @event(rfc.RPL_ENDOFMOTD)
    def autojoin(self, **kw):
        """autojoin at the end of MOTD"""
        self.bot.config['nick'] = kw['me']
        self.bot.recompile()
        channels = utils.as_list(self.bot.config.get('autojoins', []))
        for channel in channels:
            channel = utils.as_channel(channel)
            self.bot.log.info('Trying to join %s', channel)
            self.bot.join(channel)
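
Note that Queue(loop=...), as used above, was deprecated in Python 3.8 and removed in 3.10. A minimal sketch of the same ping-watchdog idea on current asyncio; the names (PingWatchdog, reconnect) are illustrative, not from the example:

import asyncio

class PingWatchdog:
    """Trigger a reconnect when no PING arrives within `timeout` seconds."""

    def __init__(self, timeout, reconnect):
        self.timeout = timeout
        self.reconnect = reconnect        # assumed callback that closes the transport
        self.queue = asyncio.Queue()      # no loop= argument on Python 3.10+

    def start(self):
        asyncio.get_running_loop().call_later(self.timeout, self.check)

    def on_ping(self):
        # record the time of the latest PING
        self.queue.put_nowait(asyncio.get_running_loop().time())

    def check(self):
        loop = asyncio.get_running_loop()
        if self.queue.empty():            # nothing arrived since the last check
            loop.call_soon(self.reconnect)
        else:
            loop.call_later(self.timeout, self.check)
        while not self.queue.empty():     # drain the recorded timestamps
            self.queue.get_nowait()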
Example #2
class Port(object):
    def __init__(self, tag="data", maxsize=1, name=None, loop=None):
        loop = loop if loop is not None else asyncio.get_event_loop()
        self.loop = loop
        self.name = name if name is not None else str(uuid1())
        self._queue = Queue(maxsize, loop=self.loop)
        self.default_value = None
        self.default_value_set = False
        self.connected = False
        self.belong_to_block = None
        self.data_tag = tag

    def set_default_value(self, value):
        if not isinstance(value, Payload):
            raise TypeError("value must be a Payload instance")
        self.default_value = value
        self.default_value_set = True

    async def get(self):
        if self.default_value_set:
            if self._queue.empty():
                return self.default_value, self.default_value[self.data_tag]

        payload = await self._queue.get()
        return payload, payload[self.data_tag]

    def get_nowait(self):
        if self.default_value_set:
            if self._queue.empty():
                return self.default_value, self.default_value[self.data_tag]

        payload = self._queue.get_nowait()
        return payload, payload[self.data_tag]

    async def put(self, payload, item):
        if self.connected:
            payload[self.data_tag] = item
            await self._queue.put(payload)

    def put_nowait(self, payload, item):
        if self.connected:
            payload[self.data_tag] = item
            self._queue.put_nowait(payload)

    def empty(self):
        return self._queue.empty()

    def full(self):
        return self._queue.full()

    def set_buffer_size(self, maxsize):
        self._queue = Queue(maxsize, loop=self.loop)
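
A small usage sketch for the port above. Payload is assumed to support dict-style indexing (as the subscripting in get() implies), and port.connected gates both put methods:

async def demo(port, payload):
    port.connected = True             # put()/put_nowait() silently drop items otherwise
    port.set_default_value(payload)   # served whenever the queue is empty
    _, item = await port.get()        # empty queue, so the default comes back immediately
    port.put_nowait(payload, 42)      # tags the payload with 42 and enqueues it
    _, item = await port.get()        # a queued payload wins over the default; item == 42

One caveat: set_buffer_size() swaps in a fresh Queue, so any payloads still buffered at that point are dropped.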
Example #3
class QueueMailBox(MailBox):
    def __init__(self, name, *args, **kwargs):
        super(QueueMailBox, self).__init__(*args, **kwargs)
        self._name = name
        self._queue = Queue()
        self._ready = True

    async def prepare(self):
        self._ready = True

    async def put(self, msg=None):
        if await self.policy():
            self._queue.put_nowait(msg)

    async def size(self):
        return self._queue.qsize()

    async def empty(self):
        return self._queue.empty()

    async def get(self):
        return await self._queue.get()

    async def policy(self):
        mem_percent = psutil.virtual_memory().percent
        if mem_percent > 80:
            logger.warning("memory usage is gt than 80")
        return True
Example #4
async def test_commit_concurrency(aconn):
    # Check the condition reported in psycopg2#103
    # Because of bad status check, we commit even when a commit is already on
    # its way. We can detect this condition by the warnings.
    notices = Queue()
    aconn.add_notice_handler(
        lambda diag: notices.put_nowait(diag.message_primary))
    stop = False

    async def committer():
        nonlocal stop
        while not stop:
            await aconn.commit()
            await asyncio.sleep(0)  # Allow the other worker to work

    async def runner():
        nonlocal stop
        cur = aconn.cursor()
        for i in range(1000):
            await cur.execute("select %s;", (i, ))
            await aconn.commit()

        # Stop the committer thread
        stop = True

    await asyncio.gather(committer(), runner())
    assert notices.empty(), "%d notices raised" % notices.qsize()
Example #5
class FoundComponentIterator:
    def __init__(self, loop, components_file_request_list):
        self.loop = loop
        self.pending_tasks = components_file_request_list
        self._to_remove = None
        self.done = Queue(loop=self.loop)
        self.add_done_callback()

    def add_done_callback(self):
        for task in self.pending_tasks:
            task.add_done_callback(self.on_completion)

    def on_completion(self, task):
        self.pending_tasks.remove(task)
        self.done.put_nowait(task)

    async def cancel_pending_tasks(self):
        for task in self.pending_tasks:
            task.cancel()
        if len(self.pending_tasks):
            await asyncio.wait(self.pending_tasks)
        while not self.done.empty():
            task = self.done.get_nowait()
            try:
                task.result()
            except BaseException:
                # retrieve results (and cancellations) so no exception goes unretrieved
                pass

    def __aiter__(self):
        return self

    async def __anext__(self):
        try:
            while len(self.pending_tasks) > 0 or not self.done.empty():
                try:
                    future = await self.done.get()
                    component_key, fetched_files = await future
                    self._to_remove = future
                    if len(fetched_files) > 0:
                        return {'key': component_key, 'files': fetched_files}
                except (RejectRequest, StopRequest):
                    # Not fatal at all, just one of many
                    pass
        except OfflineHostException:
            await self.cancel_pending_tasks()
            raise
        raise StopAsyncIteration
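
The done queue plus add_done_callback machinery above hand-rolls completion-order iteration, which asyncio.as_completed also provides. A minimal sketch of the library variant, assuming the request errors are ordinary Exception subclasses and that each task resolves to a (key, files) pair as above:

import asyncio

async def iter_components(tasks):
    # as_completed yields awaitables in completion order, like the done queue above
    for future in asyncio.as_completed(tasks):
        try:
            key, files = await future
            if files:
                yield {'key': key, 'files': files}
        except Exception:
            # one failed component is not fatal, mirroring RejectRequest/StopRequest above
            continue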
Example #6
async def consume(q: Queue) -> None:
    while not q.empty():
        json_file = await q.get()
        await verify(
            master,
            config,
            json_file,
            mirror_base_path,
            all_package_files,
            args,
            executor,
        )
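
This `while not q.empty()` loop only works when the queue is fully populated before the consumers start, and even then a worker can pass the empty() check, lose the race for the last item, and block forever in `await q.get()`. A sketch of a safer shape using get_nowait(); process here is a stand-in for verify(...):

import asyncio

async def process(json_file):
    await asyncio.sleep(0)              # stand-in for verify(...)

async def safe_consume(q: asyncio.Queue) -> None:
    while True:
        try:
            json_file = q.get_nowait()  # never blocks; QueueEmpty ends the worker
        except asyncio.QueueEmpty:
            return
        await process(json_file)

async def main(json_files, workers=4):
    q: asyncio.Queue = asyncio.Queue()
    for f in json_files:
        q.put_nowait(f)                 # fill completely before starting consumers
    await asyncio.gather(*(safe_consume(q) for _ in range(workers)))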
Example #7
async def patched_alerta(service, stop_event, bot_alert_queue: Queue):
    alerta = AlertaRunner(msg_service=service,
                          stop_event=stop_event,
                          send_heartbeats=False)

    def _alert(item):
        bot_alert_queue.put_nowait(item)

    alerta.alert = _alert
    asyncio.create_task(alerta.start())
    await asyncio.sleep(.5)
    yield alerta
    alerta.stop_event.set()
    while not bot_alert_queue.empty():
        bot_alert_queue.get_nowait()
        bot_alert_queue.task_done()
Example #8
class Dumper(BaseDumper):
    """ .DS_Store dumper """
    def __init__(self, url: str, outdir: str, **kwargs):
        super(Dumper, self).__init__(url, outdir, **kwargs)
        self.base_url = re.sub(r"/\.DS_Store.*", "", url)
        self.url_queue = Queue()

    async def start(self):
        """ Dumper entry point """
        # TODO: the recursive parsing could be faster, but it is good enough in practice
        await self.url_queue.put(self.base_url)
        # Recursively parse .DS_Store files and collect target URLs into self.targets
        await self.parse_loop()

        await self.dump()

    async def dump(self):
        # Create a pool of tasks, each running download()
        task_pool = []
        for target in self.targets:
            task_pool.append(asyncio.create_task(self.download(target)))
        for t in task_pool:
            await t

    async def parse_loop(self):
        """ 从url_queue队列中读取URL,根据URL获取并解析DS_Store """
        while not self.url_queue.empty():
            base_url = await self.url_queue.get()
            status, ds_data = await self.fetch(base_url + "/.DS_Store")
            if status != 200 or not ds_data:
                continue
            try:
                # Parse the .DS_Store data
                ds = dsstore.DS_Store(ds_data)
                for filename in set(ds.traverse_root()):
                    new_url = "%s/%s" % (base_url, filename)
                    await self.url_queue.put(new_url)
                    # Take the path from the URL and strip the leading /
                    # (a leading / breaks path.join, which makes file creation fail)
                    fullname = urlparse(new_url).path.lstrip("/")
                    self.targets.append((new_url, fullname))
            except Exception as e:
                # If parsing fails, this is not a .DS_Store file
                msg = "Failed to parse ds_store file"
                self.error_log(msg=msg, e=e)
Example #9
class IrcConnection(asyncio.Protocol):
    """asyncio protocol to handle an irc connection"""

    def connection_made(self, transport):
        self.transport = transport
        self.closed = False
        self.queue = Queue()

    def data_received(self, data):
        encoding = getattr(self, 'encoding', 'ascii')
        data = data.decode(encoding, 'ignore')
        if not self.queue.empty():
            data = self.queue.get_nowait() + data
        lines = data.split('\r\n')
        self.queue.put_nowait(lines.pop(-1))
        for line in lines:
            self.factory.dispatch(line)

    def write(self, data):
        if data is not None:
            if isinstance(data, text_type):
                data = data.encode(self.encoding)
            if not data.endswith(b'\r\n'):
                data = data + b'\r\n'
            self.transport.write(data)

    def connection_lost(self, exc):  # pragma: no cover
        self.factory.log.critical('connection lost (%s): %r',
                                  id(self.transport),
                                  exc)
        self.factory.notify('connection_lost')
        if not self.closed:
            self.close()
            # wait a few before reconnect
            self.factory.loop.call_later(
                2, self.factory.create_connection, self.__class__)

    def close(self):  # pragma: no cover
        if not self.closed:
            self.factory.log.critical('closing old transport (%r)',
                                      id(self.transport))
            try:
                self.transport.close()
            finally:
                self.closed = True
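
The single-slot use of the queue here is just a buffer for the trailing partial line: TCP delivers data in arbitrary chunks that need not align with \r\n boundaries, so the last fragment is stashed and prepended to the next read. The same logic as a plain function (a sketch, not from the source):

def split_lines(buffer: str, data: str):
    """Split buffered + new data into complete lines plus the new partial tail."""
    parts = (buffer + data).split('\r\n')
    return parts[:-1], parts[-1]        # complete lines, trailing fragment

# split_lines('', 'PING :srv\r\nNOT')  -> (['PING :srv'], 'NOT')
# split_lines('NOT', 'ICE hi\r\n')     -> (['NOTICE hi'], '')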
Example #10
class AsynSpider:
    async def gen_proxy(self):
        while True:
            yield ""

    async def __update_proxy(self):
        if len(self.succeed_proxies) != 0:
            self.proxy = next(iter(self.succeed_proxies))
        else:
            try:
                self.proxy = await self.proxy_gener.__anext__()
            except Exception as e:
                self.logger.warning("没有可用代理!")
                self.proxy = ''

    async def get(self, url, proxy='', retry=5):
        response = None
        # retry loop
        for i in range(retry):
            try:
                response = await self.session.get(
                    url,
                    headers=self.headers,
                    proxy='' if proxy is None else proxy,
                    timeout=5)
                if 'content-type' in response.headers and 'html' in response.content_type:
                    response.xpath = HTML(await response.text()).xpath
                if response.content_type == 'application/json':
                    response.json_data = await response.json()
                if response.status != 200 or (
                        self.except_content_type is not None
                        and response.content_type != self.except_content_type):
                    if proxy is not None:
                        await self.__update_proxy()
                        proxy = self.proxy
                    continue
                break
            except Exception as e:
                if proxy is not None:
                    await self.__update_proxy()
                    proxy = self.proxy
                continue
        if response is not None and response.status == 200:
            self.succeed_proxies.add(proxy)
        else:
            self.succeed_proxies.discard(self.proxy)
            if proxy is not None:
                await self.__update_proxy()
        return response

    async def gen_url(self):
        self.except_queue.put('Method not implemented: gen_url(); cannot start the crawl task.')
        yield None

    async def parse(self, response):
        self.logger.critical('Method not implemented: parse(response); returning the Response object as-is')
        return response

    async def save(self, item):
        self.logger.critical('Method not implemented: save(item); the crawled item will just be printed.')
        print(item)
        return item

    def __init__(self,
                 name="Simpyder",
                 user_agent="Simpyder ver.{}".format(__VERSION__),
                 interval=0,
                 concurrency=8,
                 log_level='INFO'):
        self.count = 0
        self.finished = False
        self.log_interval = 5
        self.name = name
        self.succeed_proxies = set()
        self.retry = 5
        self.user_agent = user_agent
        self.concurrency = concurrency
        self.interval = interval
        self.log_level = log_level
        self.proxy = ''
        self._url_count = 0
        self._item_count = 0
        self._statistic = []
        self.except_content_type = None
        self.headers = {'user-agent': self.user_agent}
        # self.session = requests.session()
        # self.session.mount('http://', HTTPAdapter(max_retries=3))
        # self.session.mount('https://', HTTPAdapter(max_retries=3))
        self.session = aiohttp.ClientSession()

    def run(self):
        self.logger = _get_logger("{}".format(self.name), self.log_level)
        print("""\033[0;32m
   _____ _  Author: Jannchie         __
  / ___/(_)___ ___  ____  __  ______/ /__  _____
  \__ \/ / __ `__ \/ __ \/ / / / __  / _ \/ ___/
 ___/ / / / / / / / /_/ / /_/ / /_/ /  __/ /
/____/_/_/ /_/ /_/ .___/\__, /\__,_/\___/_/
                /_/    /____/  version: {}\033[0m """.format(__VERSION__))
        self.logger.critical("user_agent: %s" % self.user_agent)
        self.logger.critical("concurrency: %s" % self.concurrency)
        self.logger.critical("interval: %s" % self.interval)
        self.proxy_gener = self.gen_proxy()
        self.loop = asyncio.get_event_loop()
        self.loop.run_until_complete(self._run())
        self.loop.close()

    async def _print_log(self):
        self._statistic.append({
            'url_count': self._url_count,
            'item_count': self._item_count,
            'time': datetime.datetime.now()
        })
        if len(self._statistic) > 10:
            self._statistic = self._statistic[1:10]
        delta_url_count = self._statistic[-1]['url_count'] - \
            self._statistic[0]['url_count']
        delta_item_count = self._statistic[-1]['item_count'] - \
            self._statistic[0]['item_count']
        delta_seconds = (self._statistic[-1]['time'] -
                         self._statistic[0]['time']).seconds
        url_rate = 0 if delta_seconds == 0 else delta_url_count / \
            (delta_seconds / 60)
        item_rate = 0 if delta_seconds == 0 else delta_item_count / \
            (delta_seconds / 60)

        loading = "[限速基线:{}%]".format(
            int(url_rate / (60 / self.interval) *
                100)) if self.interval != 0 else ""

        self.logger.info("已经爬取{}个链接({}/min),共产生{}个对象({}/min) {}".format(
            self._url_count, int(url_rate), self._item_count, int(item_rate),
            loading))

    async def _auto_print_log(self):
        self._last_url_count = 0
        self._last_item_count = 0
        while not self.finished:
            await self._print_log()
            await asyncio.sleep(self.log_interval)

    async def crawl_one_url(self, url, proxy):
        try:
            self.logger.debug(f"> Crawl a Url: {url}")
            if type(url) == str and url[0:4] == 'http':
                self.logger.debug(f"下载数据:{url}")
                res = await self.get(url)
                if res == None:
                    self.logger.warning(f"下载数据失败 {url} {proxy}")
            else:
                self.logger.debug(f"非URL直接返回")
                res = url
            self._url_count += 1
            item = await self.parse(res)
            count = await self.save(item)
            if isinstance(count, int):
                self._item_count += count
            else:
                self._item_count += 1
            self.logger.debug(f"√ Crawl a Url: {url}")
        except Exception as e:
            self.logger.exception(e)

    async def __crawl(self, crawl_sem, lock):
        async with crawl_sem:
            try:
                if not self.url_task_queue.empty():
                    await lock.acquire()
                    self.count += 1
                    try:
                        lock.release()
                        url = await self.url_task_queue.get()
                        await self.crawl_one_url(url, self.proxy)
                        self.url_task_queue.task_done()
                    finally:
                        await lock.acquire()
                        self.count -= 1
                        lock.release()
                else:
                    await asyncio.sleep(10)
            except Exception as e:
                self.logger.exception(e)

    async def _run_crawler(self, i):
        try:
            crawl_sem = asyncio.Semaphore(self.concurrency)
            lock = asyncio.Lock()
            self.logger.info(f"Start Crawler: {i}")
            while not self.finished or not self.url_task_queue.empty():
                await asyncio.sleep(0)
                async with crawl_sem:
                    asyncio.ensure_future(self.__crawl(crawl_sem, lock))
        except Exception as e:
            self.logger.exception(e)

    async def _add_url_to_queue(self):
        url_gener = self.gen_url()
        async for url in url_gener:
            self.logger.debug(f"Crawl Url: {url}")
            await self.url_task_queue.put(url)
            await asyncio.sleep(self.interval)

    async def _run(self):
        self.logger.debug("Spider Task Start")

        self.proxy = await self.proxy_gener.__anext__()

        self.url_task_queue = Queue(30)

        start_time = datetime.datetime.now()
        tasks = []

        print_log = asyncio.ensure_future(self._auto_print_log())

        self.logger.debug("Create Crawl Tasks")

        crawl_task = asyncio.ensure_future(self._run_crawler(0))

        await self._add_url_to_queue()
        await asyncio.sleep(5)
        while not self.url_task_queue.empty() or self.count != 0:
            await asyncio.sleep(5)
        self.finished = True
        await crawl_task
        self.logger.critical("Simpyder任务执行完毕")
        end_time = datetime.datetime.now()
        delta_time = end_time - start_time
        self.logger.critical('Total elapsed time: %s' % str(delta_time))
        self.logger.critical('Total links crawled: %s' % str(self._url_count))
        self.logger.critical('Total items produced: %s' % str(self._item_count))

        await print_log
        await self.session.close()
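
The Queue(30) above is what throttles the URL producer: await queue.put(...) suspends once 30 URLs are buffered, so _add_url_to_queue can never outrun the crawl workers. A minimal demonstration of that backpressure, with illustrative names:

import asyncio

async def producer(q: asyncio.Queue):
    for i in range(100):
        await q.put(i)                  # suspends whenever the queue holds maxsize items
        print('queued', i)

async def consumer(q: asyncio.Queue):
    while True:
        item = await q.get()
        await asyncio.sleep(0.01)       # simulate slow crawling
        q.task_done()

async def main():
    q = asyncio.Queue(maxsize=30)       # same bound as url_task_queue above
    workers = [asyncio.create_task(consumer(q)) for _ in range(4)]
    await producer(q)
    await q.join()                      # wait until every queued item is processed
    for w in workers:
        w.cancel()

asyncio.run(main())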
Example #11
class OthelloPlayer:
    def __init__(self,
                 config: Config,
                 client,
                 mode="gui",
                 weight_table=0,
                 c=10,
                 mc=False):
        """
        :param config:
        :param agent.model.OthelloModel|None model:
        :param TreeNode mtcs_info:
        :parameter OthelloModelAPI api:
        """
        self.config = config
        self.client = client
        self.mode = mode
        self.play_config = self.config.play
        self.weight_table = weight_table
        self.c = c
        self.mc = mc

        # mc_tree
        self.num_tree, self.win_tree, self.policy_tree = createTrees()

        # expanded
        self.expanded = set()  # set of nodes whose policy p (dict) has been expanded
        self.now_expanding = set()

        # threads
        self.prediction_queue = Queue(
            self.play_config.prediction_queue_size)  # queue size for batched parallel prediction
        self.sem = asyncio.Semaphore(
            self.play_config.parallel_search_num)  # bounds the number of parallel search coroutines
        self.loop = asyncio.get_event_loop()

        # for gui
        if self.mode == 'gui':
            self.thinking_history = None  # for fun
            self.avalable = None
            self.allow_resign = False
        elif self.mode == 'self_play':
            self.moves = []
            self.allow_resign = True
        self.test_mode = False
        # params
        self.running_simulation_num = 0

        # solver
        self.solver = OthelloSolver()  # minimax tree solver

    def win_rate(self, node):
        return self.win_tree[node] / (self.num_tree[node] + 1e-5)

    # think_and_play

    def think_and_play(self, own, enemy):
        """Strategy: before move 50, search with the deep-learning MC tree; once the
        tree reaches depth 50, or after move 50, use the minimax tree directly. If
        that search fails or times out, fall back to the previously built tree."""
        # renew env
        self.start_time = time.time()
        env = OthelloEnv().update(own, enemy, next_to_play=Stone.black)
        node = create_node(env)

        # After move 50, go straight to minimax search; fall back to deep learning if it finds nothing
        if env.epoch >= self.play_config.use_solver_turn:
            logger.warning("Entering minmax_tree process")
            ret = self._solver(node)
            if ret:  # not save move as play data
                return ret
        else:  # before move 50, use deep learning directly
            for t1 in range(self.play_config.thinking_loop
                            ):  # search moves for 3 times
                logger.warning(f"Entering {t1} thinking_loop")
                self._expand_tree(env, node)
                policy, action, value_diff = self._calc_policy_and_action(node)
                # stop early when the action value and visit count are large enough, or the turn is still early
                if env.epoch <= self.play_config.start_rethinking_turn or \
                        (value_diff > -0.01 and self.num_tree[node][action] >= self.play_config.required_visit_to_decide_action):
                    break

            # record or return
            if self.mode == 'gui':
                self._update_thinking_history(own, enemy, action, policy)
                self._update_avalable(own, enemy, action, policy)
            elif self.mode == 'self_play':
                if self.allow_resign:  # resign when the win rate is too low
                    if self.play_config.resign_threshold is not None and\
                        np.max(self.win_rate(node)-(self.num_tree[node]==0)*10) <= self.play_config.resign_threshold:
                        if env.epoch >= self.config.play.allowed_resign_turn:
                            return AcNQ(None, 0, 0)  # means resign
                        else:
                            logger.debug(
                                f"Want to resign but disallowed turn {env.epoch} < {self.config.play.allowed_resign_turn}"
                            )
                # save the play data
                saved_policy = self.__calc_policy_by_prob(
                    node
                ) if self.config.play_data.save_policy_of_tau_1 else policy
                self.__save_data_to_moves(own, enemy, saved_policy)
            return AcNQ(action=action,
                        n=self.num_tree[node][action],
                        q=self.win_rate(node)[action])

    def _solver(self, node):
        # use the solver to run a minimax search
        action, point = self.solver.solve(node.black,
                                          node.white,
                                          Stone(node.next_to_play),
                                          exactly=True)
        if action is None:  # the solver found nothing; signal the caller to fall back
            return None
        else:
            policy = np.zeros(64)
            policy[action] = 1
            update_num_tree_with_one_or_moresides(self.num_tree, node, action,
                                                  ["set"], [999])
            update_win_tree_with_one_or_moresides(self.win_tree, node, action,
                                                  ["set"],
                                                  [np.sign(point) * 999])
            update_policy_tree_with_one_or_moresides(self.policy_tree, node,
                                                     ["set"], [policy])
            self._update_thinking_history(node.black, node.white, action,
                                          policy)
        return AcNQ(action=action, n=999, q=np.sign(point))

    def _expand_tree(self, env, node):
        if env.epoch > 0:  # expand the tree
            self._expand_tree_2(env.chessboard.black, env.chessboard.white)
        else:
            self._set_first_move(node)

    def _expand_tree_2(self, own, enemy):
        # params
        loop = self.loop
        self.running_simulation_num = 0
        # n simulation/move
        coroutine_list = []
        # 200 simulations
        for it in range(self.play_config.simulation_num_per_move):
            coroutine_list.append(self.__start_search_my_move(own, enemy))
        coroutine_list.append(self.__prediction_worker())
        loop.run_until_complete(asyncio.gather(*coroutine_list))

    async def __start_search_my_move(self, own, enemy):
        # set parmas
        self.running_simulation_num += 1
        # wait sems
        async with self.sem:  # bound the number of concurrent searches
            env = OthelloEnv().update(own, enemy, Stone.black)
            leaf_v = await self.___recursive_simulation(env, is_root_node=True)
            self.running_simulation_num -= 1
            return leaf_v

    async def ___recursive_simulation(self,
                                      env: OthelloEnv,
                                      is_root_node=False):
        "fertilize tree process"
        # get both keys
        node, another_side_node = create_both_nodes(env)
        if self.test_mode:
            if node not in map:
                map[node] = env.epoch

        # return condition 1
        if env.done:
            if env.result == Result.black:
                return 1
            elif env.result == Result.white:
                return -1
            else:
                return 0

        # return condition 2: use the solver (minimax, after move 50)
        if env.epoch >= self.config.play.use_solver_turn_in_simulation:
            action, point = self.solver.solve(node.black,
                                              node.white,
                                              Stone(node.next_to_play),
                                              exactly=False)
            if action:
                point = point if env.next_to_play == Stone.black else -point
                leaf_v = np.sign(point)
                leaf_p = np.zeros(64)
                leaf_p[action] = 1
                # update tree
                update_num_tree_with_one_or_moresides(self.num_tree, node,
                                                      action, ["plus", "plus"],
                                                      [1, 1])  # +1 visit for this position
                update_win_tree_with_one_or_moresides(
                    self.win_tree, node, action, ["plus", "minus"],
                    [leaf_v, leaf_v])  # +/-1 wins for this move
                update_policy_tree_with_one_or_moresides(
                    self.policy_tree, node, ["set", "set"],
                    [leaf_p, leaf_p])  # the position this node should play
                return np.sign(point)
            if time.time() - self.start_time >= 55:
                return 0
        # return condition 3: expand the tree (up to move 50, deep learning)
        while node in self.now_expanding:  # two coroutines expanding the same node would conflict
            await asyncio.sleep(self.config.play.wait_for_expanding_sleep_sec)
        # is leaf
        if node not in self.expanded:  # reach leaf node
            leaf_v = await self.____expand_leaf_node(env)
            if env.next_to_play == Stone.black:
                return leaf_v  # Value for black
            else:
                return -leaf_v  # Value for white == -Value for black
        else:  # not leaf do
            virtual_loss_for_w = self.config.play.virtual_loss if env.next_to_play == Stone.black else -self.config.play.virtual_loss
            action_t = self.____decide_action(env, is_root_node)  # UCB formula
            update_num_tree_with_one_or_moresides(
                self.num_tree, node, action_t, ["plus"],
                [self.config.play.virtual_loss])
            update_win_tree_with_one_or_moresides(self.win_tree, node,
                                                  action_t, ["minus"],
                                                  [virtual_loss_for_w])
            env.do(action_t)
            leaf_v = await self.___recursive_simulation(env)  # next move
            # on returning search path
            update_num_tree_with_one_or_moresides(
                self.num_tree, node, action_t, ["plus", "plus"],
                [-self.config.play.virtual_loss + 1, 1])
            update_win_tree_with_one_or_moresides(
                self.win_tree, node, action_t, ["plus", "minus"],
                [virtual_loss_for_w + leaf_v, leaf_v])
            if self.test_mode:
                logger.warning("%s %s", map[node], leaf_v)
        return leaf_v

    async def ____expand_leaf_node(self, env):
        "use to expand new leaf"
        node, another_side_node = create_both_nodes(env)
        self.now_expanding.add(node)

        # flip + rotate
        rotate_right_num, is_flip_vertical, black_ary, white_ary = flip_and_rotate_board_to_array(
            env.chessboard.black, env.chessboard.white)

        # predict
        state = [
            white_ary, black_ary
        ] if env.next_to_play == Stone.white else [black_ary, white_ary]
        future = await self.predict(np.array(state))  # type: Future
        await future
        leaf_p, leaf_v = future.result()

        # reverse rotate and flip about leaf_p
        leaf_p = flip_and_rotate_result(leaf_p, rotate_right_num,
                                        is_flip_vertical)
        if self.mc:
            black = env.chessboard.black
            leaf_v += np.sum(bit_to_array(black, 64) * self.weight_table)

        # update
        update_policy_tree_with_one_or_moresides(self.policy_tree, node,
                                                 ["set", "set"],
                                                 [leaf_p, leaf_p])
        self.expanded.add(node)
        self.now_expanding.remove(node)
        return leaf_v

    def ____decide_action(self, env, is_root_node):
        # find correct moves
        node = create_node(env)
        legal_moves = find_correct_moves(
            node.black, node.white
        ) if env.next_to_play == Stone.black else find_correct_moves(
            node.white, node.black)

        # vn = formula here
        vn = max(np.sqrt(np.sum(self.num_tree[node])),
                 1)  # SQRT of sum(N(s, b); for all b)

        # p = formula here  re-normalize in legal moves
        vp = self.policy_tree[node]
        vp = vp * bit_to_array(legal_moves, 64)
        temperature = 1
        if np.sum(vp) > 0:
            temperature = min(
                np.exp(1 -
                       np.power(env.epoch / self.config.play.policy_decay_turn,
                                self.config.play.policy_decay_power)), 1)
            vp = normalize(vp, temperature)
        # add noise 0.75*p + 0.25*noise
        if is_root_node and self.play_config.noise_eps > 0:  # Is it correct?? -> (1-e)p + e*Dir(alpha)
            noise = dirichlet_noise_of_mask(legal_moves,
                                            self.play_config.dirichlet_alpha)
            vp = (1 - self.play_config.noise_eps
                  ) * vp + self.play_config.noise_eps * noise

        # u_ = formula here
        vpn = vp * vn / (1 + self.num_tree[node])
        if env.next_to_play == Stone.black:
            vpn_with_weight = (self.win_rate(node) * self.c + vpn + 1000 +
                               self.weight_table) * bit_to_array(
                                   legal_moves, 64)
        else:
            vpn_with_weight = (-self.win_rate(node) * self.c + vpn + 1000 +
                               self.weight_table) * bit_to_array(
                                   legal_moves, 64)
        action_t = int(np.argmax(vpn_with_weight))
        return action_t

    async def __prediction_worker(self):
        " do prediction in this worker"
        margin = 10  # wait for at most 10 epochs x 0.0001
        while self.running_simulation_num > 0 or margin > 0:
            if self.prediction_queue.empty():
                if margin > 0:
                    margin -= 1
                await asyncio.sleep(
                    self.config.play.prediction_worker_sleep_sec)
                continue
            item_list = [
                self.prediction_queue.get_nowait()
                for _ in range(self.prediction_queue.qsize())
            ]  # type: list[QItem]
            data = np.array([x.state for x in item_list])
            policy_ary, value_ary = self.client.forward(
                data)  # shape=(N, 2, 8, 8)
            for p, v, item in zip(policy_ary, value_ary, item_list):
                item.future.set_result((p, v))

    def _set_first_move(self, node):
        # choose a first move; num_tree = [1], policy_tree = 1/n over every legal square
        legal_array = bit_to_array(find_correct_moves(node.black, node.white),
                                   64)
        action = np.argmax(legal_array)
        update_num_tree_with_one_or_moresides(self.num_tree, node, action,
                                              ["set"], [1])
        update_win_tree_with_one_or_moresides(self.win_tree, node, action,
                                              ["set"], [0])
        update_policy_tree_with_one_or_moresides(
            self.policy_tree, node, ["set"],
            [legal_array / np.sum(legal_array)])

    def _calc_policy_and_action(self, node):
        policy = self._calc_policy(
            node.black, node.white
        )  # prior: early turns use p[n] = num_tree[key][n]; later p is one-hot at the most visited square
        action = int(np.random.choice(range(64),
                                      p=policy))  # sample a move with probability p (the prior)
        action_by_value = int(
            np.argmax(self.win_rate(node) +
                      (self.num_tree[node] > 0) * 100))  # visited move with the highest win rate q
        value_diff = self.win_rate(node)[action] - self.win_rate(node)[
            action_by_value]
        return policy, action, value_diff

    def _calc_policy(self, own, enemy):
        env = OthelloEnv().update(own, enemy, Stone.black)
        node = create_node(env)
        # if turn < 4
        if env.epoch < self.play_config.change_tau_turn:
            return self.__calc_policy_by_prob(node)  # p value
        else:
            return self.__calc_policy_by_max(node)

    def __calc_policy_by_prob(self, node):
        return self.num_tree[node] / np.sum(self.num_tree[node])  # tau = 1

    def __calc_policy_by_max(self, node):
        action = np.argmax(self.num_tree[node])  # tau = 0
        ret = np.zeros(64)  # one hot
        ret[action] = 1
        return ret

    def _update_thinking_history(self, black, white, action, policy):
        node = TreeNode(black, white, Stone.black.value)
        next_key = self.__get_next_key(black, white, action)
        self.thinking_history = \
            LastAcNQ(action, policy, list(self.win_rate(node)), list(self.num_tree[node]),
                        list(self.win_rate(next_key)), list(self.num_tree[next_key]))

    def _update_avalable(self, black, white, action, policy):
        node = TreeNode(black, white, Stone.black.value)
        next_key = self.__get_next_key(black, white, action)
        self.avalable = LastAva(
            find_correct_moves(node.black, node.white),
            find_correct_moves(next_key.white, next_key.black))

    def __get_next_key(self, own, enemy, action):
        env = OthelloEnv().update(own, enemy, Stone.black)
        env.do(action)
        return create_node(env)

    def __save_data_to_moves(self, own, enemy, policy):
        for flip in [False, True]:
            for rot_right in range(4):
                self.moves.append(
                    flip_and_rotate_right(flip, rot_right, own, enemy, policy))

    async def predict(self, x):
        future = self.loop.create_future()
        await self.prediction_queue.put(QItem(x, future))
        return future
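
The predict/__prediction_worker pair above is a micro-batching pattern: many search coroutines enqueue (state, future) items and await the future, while a single worker drains the queue and runs the network once per batch. A stripped-down sketch of the same idea; model.forward and the running() predicate are assumptions, not part of the example:

import asyncio
import numpy as np

async def predict(queue: asyncio.Queue, state):
    future = asyncio.get_running_loop().create_future()
    await queue.put((state, future))    # hand the request to the batching worker
    return await future                 # resolved by the worker below

async def prediction_worker(queue: asyncio.Queue, model, running):
    while running():
        if queue.empty():
            await asyncio.sleep(0.001)  # let more requests accumulate
            continue
        items = [queue.get_nowait() for _ in range(queue.qsize())]
        batch = np.array([state for state, _ in items])
        policies, values = model.forward(batch)   # one batched forward pass
        for (_, future), p, v in zip(items, policies, values):
            future.set_result((p, v))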
Example #12
async def consume(q: Queue):
    while not q.empty():
        json_file = q.get_nowait()
        await verify(config, json_file, mirror_base_path,
                     all_package_files, args, executor)
Example #13
class Dumper(BaseDumper):
    """ index dumper """
    def __init__(self, url: str, outdir: str, **kwargs):
        super(Dumper, self).__init__(url, outdir, **kwargs)
        self.netloc = urlparse(url).netloc
        self.fetched_urls = []
        self.task_count = 10  # number of worker coroutines
        self.running = False

    async def start(self):
        """ Entry point """
        # The queue must be created inside the running coroutine; see https://stackoverflow.com/questions/53724665/using-queues-results-in-asyncio-exception-got-future-future-pending-attached
        self.targets_q = Queue()  # url, name
        await self.targets_q.put((self.url, "index"))
        self.running = True

        tasks = []
        for _ in range(self.task_count):
            tasks.append(asyncio.create_task(self.dump()))
        for t in tasks:
            await t

        self.running = False

    async def dump(self):
        """ 核心下载方法 """
        while self.running:
            # queue.get() would wait forever, so a timeout is needed;
            # get_nowait() must not be used here, or the pool degrades to a single task
            try:
                url, name = await asyncio.wait_for(self.targets_q.get(), 7)
            except Exception as e:
                # self.error_log("Failed to get item form queue.", e=e)
                break
            if url in self.fetched_urls:
                continue
            # download and save
            await self.download((url, name))
            self.fetched_urls.append(url)
            # if the target is HTML, extract its links
            if await self.is_html(url):
                async with aiohttp.ClientSession(
                        connector=self.connector,
                        timeout=self.timeout) as session:
                    try:
                        async with session.get(url,
                                               headers=self.headers) as resp:
                            d = pq(await resp.text())
                            # iterate over the links
                            for a in d("a"):
                                txt = pq(a).text()
                                href = pq(a).attr("href")
                                # skip anchors without text or href
                                if not txt or not href:
                                    continue
                                href_parsed = urlparse(href)
                                if href_parsed.netloc:
                                    # skip links outside this domain
                                    if href_parsed.netloc != self.netloc:
                                        continue
                                if href_parsed.scheme:
                                    # skip non-http schemes
                                    if not href_parsed.scheme.startswith(
                                            "http"):
                                        continue
                                new_url = urljoin(url, href_parsed.path)
                                fullname = urlparse(new_url).path.lstrip("/")
                                await self.targets_q.put((new_url, fullname))
                                # self.targets_q.put_nowait((new_url, fullname))
                    except Exception as e:
                        msg = "Failed to dump url %s" % url
                        self.error_log(msg=msg, e=e)
                    finally:
                        await session.close()
            if self.targets_q.empty():
                break

    async def is_html(self, url) -> bool:
        """ 判断目标URL是不是属于html页面 """
        async with aiohttp.ClientSession(connector=self.connector,
                                         timeout=self.timeout) as session:
            try:
                async with session.head(url, headers=self.headers) as resp:
                    return bool("html" in resp.headers.get("content-type", ""))
            except Exception as e:
                msg = "Failed to dump url %s" % url
                self.error_log(msg=msg, e=e)
            finally:
                await session.close()
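
Wrapping targets_q.get() in asyncio.wait_for(..., 7) is what lets each worker exit once the crawl dries up: a bare get() would block forever on an empty queue, while get_nowait() would make a worker quit the moment its peers momentarily drain it. The pattern in isolation; handle is a stand-in for the download/parse logic above:

import asyncio

async def handle(url, name):
    ...                                 # stand-in for download/parse

async def worker(q: asyncio.Queue):
    while True:
        try:
            # wait up to 7s for new work; a timeout means the crawl is done
            url, name = await asyncio.wait_for(q.get(), timeout=7)
        except asyncio.TimeoutError:
            break
        await handle(url, name)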
Example #14
async def radar(radar_Q: Queue = None):

    if radar_Q is None:
        print("Radar task is terminating. The radar_Q is None. Nothing to do")
        return

    try:
        q = Queue()

        while True:

            async with async_timeout.timeout(5) as tm:
                while True:
                    try:
                        incoming_action: INCOMING_ACTION_TYPE = await radar_Q.get()
                        q.put_nowait(incoming_action)
                        STATS['number_rx'] += 1
                        STATS['max_incoming_q_size'] = max(radar_Q.qsize(), STATS['max_incoming_q_size'])
                    except Exception as x:
                        break

            if not tm.expired:
                continue

            time_now = datetime.utcnow()

            while not q.empty():

                item = q.get_nowait()

                icao_rec = item.actionMsg

                # print(F"DB_WORKER_RXED: {action_types} {icao_rec}")

                # await db_process_sbs1_msg2(database, sbs1_msg)
                id = item.aircraftId
                last_seen = icao_rec['last_seen']
                data = icao_rec['current']

                delta_sec = (time_now - last_seen).total_seconds()
                if delta_sec > 300:
                    print(F"@Radar: Got an old one: {id} {last_seen} {delta_sec}")
                    STATS['rx_expired'] += 1
                    remove_track(id)
                    del history[id]
                    continue

                history[id] = last_seen

                STATS['max_history_size'] = max(STATS['max_history_size'],len(history))

                if data['lon'] is not None and data['lat'] is not None:
                    update_track(id,data['lat'], data['lon'])
                    STATS['updates'] += 1

                update_table_store(id, last_seen, data)
                render_table_store()
            # endwhile

            # cleanup any lingering tracks:
            flush()

            refresh()

    except CancelledError:
        print("Cancelling radar task")

    except Exception as x:
        print(F"Exception {x}")
Example #15
class Dumper(BasicDumper):
    """ index dumper """
    def __init__(self, url: str, outdir: str):
        super(Dumper, self).__init__(url, outdir)
        self.netloc = urlparse(url).netloc
        self.fetched_urls = []
        self.task_count = 10  # number of worker coroutines
        self.running = False

    async def start(self):
        """ Entry point """
        # The queue must be created inside the running coroutine; see https://stackoverflow.com/questions/53724665/using-queues-results-in-asyncio-exception-got-future-future-pending-attached
        self.targets_q = Queue()  # url, name
        await self.targets_q.put((self.url, "index"))
        self.running = True

        tasks = []
        for i in range(self.task_count):
            tasks.append(asyncio.create_task(self.dump()))
        for t in tasks:
            await t

    async def dump(self):
        """ 核心下载方法 """
        while self.running:
            url, name = await self.targets_q.get()
            if url in self.fetched_urls:
                continue
            try:
                if await self.is_html(url):
                    # if the target is HTML, extract its links
                    async with aiohttp.ClientSession() as session:
                        async with session.get(url,
                                               headers=self.headers) as resp:
                            d = pq(await resp.text())
                            # iterate over the links
                            for a in d("a"):
                                txt = pq(a).text()
                                href = pq(a).attr("href")
                                href_parsed = urlparse(href)
                                if not txt:  # skip anchors without text
                                    continue
                                if href_parsed.netloc:
                                    if href_parsed.netloc != self.netloc:  # skip links outside this domain
                                        continue
                                if href_parsed.scheme:
                                    if not href_parsed.scheme.startswith(
                                            "http"):  # skip non-http schemes
                                        continue
                                new_url = urljoin(url, href_parsed.path)
                                fullname = urlparse(new_url).path.lstrip("/")
                                await self.targets_q.put((new_url, fullname))
                else:
                    # otherwise, download and save the file
                    await self.download((url, name))
                self.fetched_urls.append(url)
            except Exception as e:
                click.secho("Dump %s failed" % url, fg="red")
                print(e)
            if self.targets_q.empty():
                break

    async def is_html(self, url) -> bool:
        """ 判断目标URL是不是属于html页面 """
        async with aiohttp.ClientSession() as session:
            async with session.head(url, headers=self.headers) as resp:
                return bool("html" in resp.headers.get("content-type", ""))