Example #1
0
    def __inner_job():
        """Run one capture -> OCR -> recommendation cycle and print the results.

        Pipeline: screenshot the game window, OCR the text, parse question and
        answers, count answer hits via Baidu, query the knowledge base, then
        persist the screen and parsed Q/A to disk.  Reads closure variables
        from the enclosing scope (data_directory, image_compress_level,
        crop_areas, game_type, timeout, enable_chrome, writer, noticer).
        """
        start = time.time()
        image_binary = analyze_current_screen_text(
            directory=data_directory,
            compress_level=image_compress_level[0],
            crop_area=crop_areas[game_type]
        )
        if not image_binary:
            # nothing usable on screen this round -- skip instead of OCR-ing None
            print("do not detect question and answers")
            return

        keywords = get_text_from_image(
            image_data=image_binary,
            timeout=timeout
        )
        if not keywords:
            print("text not recognize")
            return

        true_flag, real_question, question, answers = parse_question_and_answer(keywords)

        # Strip "label:" / "label." prefixes from each answer option.
        # NOTE(review): rsplit(...)[-1] keeps only the text after the LAST
        # separator, so an answer containing "." or ":" in its body (e.g.
        # "3.14") would be truncated -- confirm prefixes are single.
        answers = map(lambda a: a.rsplit(":")[-1], answers)
        answers = list(map(lambda a: a.rsplit(".")[-1], answers))

        print("~" * 60)
        print("{0}\n{1}".format(real_question, "\n".join(answers)))
        print("~" * 60)

        # Hand the question to the companion browser process so it can
        # search in parallel while we count hits here.
        if enable_chrome:
            writer.send(question)
            noticer.set()

        # Rank answers by Baidu hit count, most frequent first.
        summary = baidu_count(question, answers, timeout=timeout)
        summary_li = sorted(summary.items(), key=operator.itemgetter(1), reverse=True)

        # true_flag flips which end of the ranking gets the "**" highlight
        # (presumably marking a negated question -- TODO confirm with
        # parse_question_and_answer).
        if true_flag:
            recommend = "{0}\n{1}".format(
                "肯定回答(**): {0}".format(summary_li[0][0]),
                "否定回答(  ): {0}".format(summary_li[-1][0]))
        else:
            recommend = "{0}\n{1}".format(
                "肯定回答(  ): {0}".format(summary_li[0][0]),
                "否定回答(**): {0}".format(summary_li[-1][0]))
        print("*" * 60)
        print(recommend)
        print("*" * 60)

        # Knowledge-base lookup for a direct answer, wrapped to 60 columns.
        ans = kwquery(real_question)
        print("-" * 60)
        print(wrap(" ".join(ans), 60))
        print("-" * 60)

        end = time.time()
        print("use {0} 秒".format(end - start))

        # Persist artifacts for later inspection / training data.
        save_screen(directory=data_directory)
        save_question_answers_to_file(real_question, answers, directory=data_directory)
Example #2
0
    def test_crawler(self):
        """
        Test baidu crawler.

        Runs kwquery over each sample question and prints the candidate
        answers for manual inspection.

        :return: None
        """
        from core.crawler.crawl import kwquery
        from core.crawler.crawl import jieba_initialize
        jieba_initialize()
        # BUG FIX: the original rebound `query` immediately, so the first
        # sample question was dead code -- exercise every sample instead.
        queries = [
            "回锅肉属于什么菜系",
            "北京奥运会是什么时候",
        ]
        for query in queries:
            ans = kwquery(query)
            print("~~~~~~~")
            for a in ans:
                print(a)
            print("~~~~~~~")
Example #3
0
    def test_crawler(self):
        """
        Test baidu crawler.

        Feeds each sample question through kwquery and prints the resulting
        answers so they can be eyeballed.

        :return: None
        """
        from core.crawler.crawl import kwquery
        from core.crawler.crawl import jieba_initialize
        jieba_initialize()
        # BUG FIX: `query` used to be overwritten right after its first
        # assignment, silently skipping that sample -- run both queries.
        for query in ("回锅肉属于什么菜系", "北京奥运会是什么时候"):
            ans = kwquery(query)
            print("~~~~~~~")
            for a in ans:
                print(a)
            print("~~~~~~~")
    def __inner_job():
        """Run one capture -> OCR -> recommendation cycle and print the results.

        Pipeline: screenshot the game window, OCR the text, parse question and
        answers, count answer hits via Baidu, query the knowledge base, then
        save the screen and pause briefly.  Reads closure variables from the
        enclosing scope (data_directory, image_compress_level, crop_areas,
        game_type, use_monitor, timeout, enable_chrome, writer, noticer).
        """
        start = time.time()
        text_binary = analyze_current_screen_text(
            directory=data_directory,
            compress_level=image_compress_level[0],
            crop_area=crop_areas[game_type],
            use_monitor=use_monitor)
        if not text_binary:
            # ROBUSTNESS: sibling implementation guards against an empty
            # capture before OCR; do the same here instead of passing a
            # falsy value to get_text_from_image.
            print("do not detect question and answers")
            return

        keywords = get_text_from_image(image_data=text_binary, timeout=timeout)
        if not keywords:
            print("text not recognize")
            return

        true_flag, real_question, question, answers = parse_question_and_answer(
            keywords)

        if game_type == "UC答题":
            # BUG FIX: the original bound a lazy map() object here; the
            # "\n".join() below exhausted that iterator, so baidu_count later
            # received an empty sequence.  Materialize a list instead.
            answers = [a.rsplit(":")[-1] for a in answers]

        print("~" * 60)
        print("{0}\n{1}".format(real_question, "\n".join(answers)))
        print("~" * 60)

        # Hand the question to the companion browser process so it can
        # search in parallel while we count hits here.
        if enable_chrome:
            writer.send(question)
            noticer.set()

        # Rank answers by Baidu hit count, most frequent first; true_flag
        # flips which end of the ranking gets the "**" highlight.
        summary = baidu_count(question, answers, timeout=timeout)
        summary_li = sorted(summary.items(),
                            key=operator.itemgetter(1),
                            reverse=True)
        if true_flag:
            recommend = "{0}\n{1}".format(
                "肯定回答(**): {0}".format(summary_li[0][0]),
                "否定回答(  ): {0}".format(summary_li[-1][0]))
        else:
            recommend = "{0}\n{1}".format(
                "肯定回答(  ): {0}".format(summary_li[0][0]),
                "否定回答(**): {0}".format(summary_li[-1][0]))
        print("*" * 60)
        print("\n".join(
            map(lambda item: "{0}: {1}".format(item[0], item[1]), summary_li)))
        print(recommend)
        print("*" * 60)

        # Knowledge-base lookup for a direct answer, wrapped to 60 columns.
        ans = kwquery(real_question)
        print("-" * 60)
        print(wrap(" ".join(ans), 60))
        print("-" * 60)

        end = time.time()
        print("use {0} 秒".format(end - start))
        save_screen(directory=data_directory)
        time.sleep(1)
    def __inner_job():
        """Run one capture -> OCR -> recommendation cycle, tabulating hit counts.

        Pipeline: screenshot the game window, OCR the text, parse question and
        answers, count answer hits via Baidu (rendered as an ASCII table),
        query the knowledge base, then save the screen.  Reads closure
        variables from the enclosing scope (data_directory,
        image_compress_level, crop_areas, game_type, use_monitor, timeout,
        enable_chrome, writer, noticer).
        """
        start = time.time()
        text_binary = analyze_current_screen_text(
            directory=data_directory,
            compress_level=image_compress_level[0],
            crop_area=crop_areas[game_type],
            use_monitor=use_monitor)
        if not text_binary:
            # ROBUSTNESS: guard against an empty capture before OCR,
            # matching the sibling implementation.
            print("do not detect question and answers")
            return

        # CONSISTENCY FIX: the original dropped the shared `timeout` here
        # (trailing-comma call) although every sibling call passes it.
        keywords = get_text_from_image(image_data=text_binary, timeout=timeout)
        if not keywords:
            print("text not recognize")
            return

        true_flag, real_question, question, answers = parse_question_and_answer(
            keywords)

        print('-' * 72)
        print(real_question)
        print('-' * 72)
        print("\n".join(answers))

        # Hand the question to the companion browser process so it can
        # search in parallel while we count hits here.
        if enable_chrome:
            writer.send(question)
            noticer.set()

        search_question = pre_process_question(question)
        summary = baidu_count(search_question, answers, timeout=timeout)
        summary_li = sorted(summary.items(),
                            key=operator.itemgetter(1),
                            reverse=True)

        # Render answer frequencies as an ASCII table, most frequent first.
        data = [("选项", "同比")]
        for option, weight in summary_li:
            data.append((option, weight))
        table = AsciiTable(data)
        print(table.table)

        # true_flag flips which end of the ranking gets the "**" highlight.
        print("*" * 72)
        if true_flag:
            print("肯定回答(**): ", summary_li[0][0])
            print("否定回答(  ): ", summary_li[-1][0])
        else:
            print("肯定回答(  ): ", summary_li[0][0])
            print("否定回答(**): ", summary_li[-1][0])
        print("*" * 72)

        # Knowledge-base lookup for a direct answer.
        print("~" * 60)
        print(kwquery(real_question.strip("?")))
        print("~" * 60)

        end = time.time()
        print("use {0} 秒".format(end - start))
        save_screen(directory=data_directory)