Example #1
0
def test_BLAS():
    """
    Basic sanity check that the parallel machinery plays nicely with BLAS,
    which is problematic for a number of reasons.

    A successful return gives some confidence that this machine can run the
    kind of parallel processing implemented in LASIF.
    """
    job_count = 4
    outcome = parallel_map(__random_fct_2, [{}] * job_count, n_jobs=2)
    assert len(outcome) == job_count
Example #2
0
def launch_processing(data_generator, log_filename=None, waiting_time=4.0,
                      process_params=None):
    """
    Launch the parallel processing.

    :param data_generator: A generator yielding file information as required.
    :param log_filename: If given, a log will be written to that file.
    :param waiting_time: The time spent sleeping after the initial message has
        been printed. Useful if the user should be given the chance to cancel
        the processing.
    :param process_params: If given, the processing parameters will be written
        to the logfile.
    :return: A dict with the failed, warning, successful, and total file
        counts.
    """
    logger = ColoredLogger(log_filename=log_filename)

    logger.info("Launching preprocessing using all processes...\n"
                "This might take a while. Press Ctrl + C to cancel.\n")

    # Give the user some time to read the message (and hit Ctrl + C).
    time.sleep(waiting_time)
    results = parallel_map(preprocess_file,
                           ({"processing_info": i} for i in data_generator),
                           verbose=50, pre_dispatch="all")

    # Keep track of all files.
    successful_file_count = 0
    warning_file_count = 0
    failed_file_count = 0
    total_file_count = len(results)

    for result in results:
        if result.exception is not None:
            # BUG FIX: the worker is invoked with the keyword
            # "processing_info" (see the parallel_map call above), so
            # result.func_args has no "file_info" key - the old lookup
            # raised a KeyError while trying to report the failure.
            # NOTE(review): assumes each generated item carries a
            # "data_path" key, as the original lookup also did.
            filename = result.func_args["processing_info"]["data_path"]
            msg = "Exception processing file '%s'. %s\n%s" % (
                filename, result.exception, result.traceback)
            logger.error(msg)
            failed_file_count += 1
        elif result.warnings:
            # Processing finished but emitted warnings.
            warning_file_count += 1
        else:
            successful_file_count += 1

    return {
        "failed_file_count": failed_file_count,
        "warning_file_count": warning_file_count,
        "total_file_count": total_file_count,
        "successful_file_count": successful_file_count}
Example #3
0
def test_parallel_map():
    """
    Test the parallel mapping method.
    """
    def input_generator():
        yield {"a": 2, "b": 1}  # results in 2
        yield {"a": 4, "b": 0}  # results in None, an exception,
        # and a traceback.
        yield {"a": 1, "b": 1, "c": 1}  # results in 1 and two warnings.
        # BUG FIX: a plain return ends the generator. Raising StopIteration
        # inside a generator body is turned into a RuntimeError by PEP 479
        # (enforced since Python 3.7).
        return

    results = parallel_map(__random_fct, input_generator())

    # Sort them with the expected result to be able to compare them. The order
    # is not guaranteed when using multiple processes.
    # BUG FIX: one result is None and Python 3 cannot order None against
    # ints - map None to -1 so it sorts first, matching the asserts below.
    results.sort(key=lambda x: -1 if x.result is None else x.result)

    assert results[0].result is None
    assert results[0].func_args == {"a": 4, "b": 0, "c": 0}
    assert results[0].warnings == []
    assert type(results[0].exception) is ZeroDivisionError
    assert "ZeroDivisionError" in results[0].traceback

    assert results[1].result == 1
    assert results[1].func_args == {"a": 1, "b": 1, "c": 1}
    assert results[1].exception is None
    assert results[1].traceback is None
    assert len(results[1].warnings) == 2
    assert results[1].warnings[0].category is SyntaxWarning
    assert results[1].warnings[1].category is UserWarning
    assert str(results[1].warnings[0].message) == "First Warning"
    assert str(results[1].warnings[1].message) == "Second Warning"

    assert results[2].result == 2
    assert results[2].func_args == {"a": 2, "b": 1, "c": 0}
    assert results[2].warnings == []
    assert results[2].exception is None
    assert results[2].traceback is None