Example #1
def test_process_data():
    def fff():
        print("fff")

    _handle, benchmark_file = tempfile.mkstemp()
    os.close(_handle)  # mkstemp returns an open OS-level descriptor; only the path is needed
    try:
        for _i in range(55):
            lexnlp_tests.benchmark(
                lexnlp_tests.build_extraction_func_name(fff),
                fff,
                benchmark_file=benchmark_file)
        res = []

        def process(actions: List[Dict]):
            res.extend(actions)

        upload_benchmarks.process_data(benchmark_file, 'index2', process)

        d1 = res[0]
        assert_equals('fff(text)', d1['_source']['function'])
    finally:
        if benchmark_file:
            os.remove(benchmark_file)
Example #2
    def test_process_data(self):
        def fff():
            print("fff")

        _handle, benchmark_file = tempfile.mkstemp()
        os.close(_handle)  # mkstemp returns an open OS-level descriptor; only the path is needed
        try:
            for _i in range(55):
                lexnlp_tests.benchmark(
                    lexnlp_tests.build_extraction_func_name(fff),
                    fff,
                    benchmark_file=benchmark_file)
            res = []

            # pylint: disable=unnecessary-lambda
            upload_benchmarks.process_data(benchmark_file, 'index2',
                                           lambda actions: res.extend(actions))

            d1 = res[0]
            self.assertEqual('fff(text)', d1['_source']['function'])
        finally:
            if benchmark_file:
                os.remove(benchmark_file)
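Both variants above only collect the generated bulk actions in a list and check the first entry, so what a real callback for upload_benchmarks.process_data would do stays implicit. As a rough, hypothetical illustration (not part of the test suite), such a callback could forward the same actions to an Elasticsearch cluster via the official client's bulk helper; the host URL below is an assumption for the sketch, and the actions are assumed to already carry the target index ('index2') set by process_data.

from elasticsearch import Elasticsearch
from elasticsearch.helpers import bulk


def index_actions(actions):
    # Each action is a dict carrying the benchmark record under "_source"
    # (the assertions above inspect d1['_source']['function']).
    es = Elasticsearch(['http://localhost:9200'])  # assumed local cluster
    bulk(es, actions)


# upload_benchmarks.process_data(benchmark_file, 'index2', index_actions)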
def test_file_3():
    """
    Test using sample file #3.
    :return:
    """
    # Open file
    base_path = get_module_path()

    with open(os.path.join(base_path, "../test_data", "1100644_2016-11-21"), "rb") as test_file_handle:
        # Read buffer
        file_buffer = test_file_handle.read().decode("utf-8")

        # Parse and count
        sections = list(lexnlp_tests.benchmark('get_sections(file_buffer)', get_sections, file_buffer))
        num_sections = len(sections)

        assert_equal(num_sections, 72)
def test_file_1():
    """
    Test using sample file #1.
    :return:
    """
    # Open file
    base_path = get_module_path()

    with codecs.open(os.path.join(base_path, "../test_data", "1582586_2015-08-31"), encoding='utf8') as test_file_handle:
        # Read buffer
        file_buffer = test_file_handle.read()

        # Parse and count
        sections = list(lexnlp_tests.benchmark('get_sections(file_buffer)', get_sections, file_buffer))
        num_sections = len(sections)

        assert_equal(num_sections, 23)
    def test_file_3(self):
        text = self.get_text('1100644_2016-11-21')
        sections = list(
            lexnlp_tests.benchmark('get_sections(text)', get_sections, text))
        num_sections = len(sections)
        assert_equal(num_sections, 72)
    def test_file_2(self):
        text = self.get_text('1031296_2004-11-04')
        sections = list(
            lexnlp_tests.benchmark('get_sections(text)', get_sections, text))
        num_sections = len(sections)
        assert_equal(num_sections, 11)
    def test_file_1(self):
        text = self.get_text('1582586_2015-08-31')
        sections = list(
            lexnlp_tests.benchmark('get_sections(text)', get_sections, text))
        num_sections = len(sections)
        assert_equal(num_sections, 23)
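In all of the section tests above, lexnlp_tests.benchmark merely wraps the call for timing; the extraction itself is a plain call to get_sections. A minimal standalone sketch, assuming get_sections comes from LexNLP's lexnlp.nlp.en.segments.sections module and that the sample filing sits in a local test_data directory:

import codecs

from lexnlp.nlp.en.segments.sections import get_sections  # assumed import path

with codecs.open("test_data/1582586_2015-08-31", encoding="utf-8") as f:
    text = f.read()

# get_sections yields section strings lazily, so materialize them to count
sections = list(get_sections(text))
print(len(sections))  # the corresponding test above expects 23 sections for this file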