Example #1
0
def initialize_logger(filename,
                      log_level_print,
                      log_level_file,
                      allow_override=False):
    """Initialize the root logger with separate console and file levels.

    :param filename: path of the log file handed to the root logger
    :param log_level_print: level name for console output (e.g. "info")
    :param log_level_file: level name for file output (e.g. "debug")
    :param allow_override: forwarded to util.initialize_root_logger

    Raises AttributeError if a level name is not a valid logging level.
    """
    # BUGFIX: logging._levelNames is a private Python-2-only attribute that no
    # longer exists in Python 3. Resolve level names via the public module
    # constants instead (same pattern as the other level lookups in this file).
    log_level_print = getattr(logging, log_level_print.upper())
    log_level_file = getattr(logging, log_level_file.upper())
    util.initialize_root_logger(filename,
                                log_level_print,
                                log_level_file,
                                allow_override=allow_override)
Example #2
0
def create_undirected_graph_storage_from_treewidth_experiments(
        input_pickle_file, output_pickle_file, min_tw, max_tw, min_nodes,
        max_nodes, min_conn_prob, max_conn_prob):
    """Filter pickled treewidth-experiment results into an UndirectedGraphStorage.

    Reads the result dict (keyed by node count, then connection probability)
    from input_pickle_file, keeps only graphs whose node count, treewidth and
    connection probability lie within the given bounds, and pickles the
    resulting storage to output_pickle_file.

    :param input_pickle_file: pickle of {node_count: {conn_prob: [results]}}
    :param output_pickle_file: destination pickle for the graph storage
    :param min_tw, max_tw: inclusive treewidth bounds
    :param min_nodes, max_nodes: inclusive node-count bounds
    :param min_conn_prob, max_conn_prob: inclusive connection-probability bounds
    """
    util.ExperimentPathHandler.initialize()
    file_basename = os.path.basename(input_pickle_file).split(".")[0].lower()
    log_file = os.path.join(
        util.ExperimentPathHandler.LOG_DIR,
        "creation_undirected_graph_storage_from_treewidth_{}.log".format(
            file_basename))
    util.initialize_root_logger(log_file)

    # get root logger
    logger = logging.getLogger()

    graph_storage = datamodel.UndirectedGraphStorage(
        parameter_name="treewidth")

    logger.info("Reading file {}".format(input_pickle_file))
    with open(input_pickle_file, "rb") as f:
        input_contents = pickle.load(f)

    for number_of_nodes in list(input_contents.keys()):
        # BUGFIX: min_nodes/max_nodes were accepted but never applied; the
        # node-count filter now mirrors the treewidth/probability filters.
        # (assumes the outer keys are numeric node counts — matches usage)
        if number_of_nodes < min_nodes or number_of_nodes > max_nodes:
            continue
        logger.info("Handling graphs stored for number of nodes {}".format(
            number_of_nodes))
        data_for_nodes = input_contents[number_of_nodes]
        for connection_probability in list(data_for_nodes.keys()):
            if connection_probability < min_conn_prob or connection_probability > max_conn_prob:
                continue
            list_of_results = data_for_nodes[connection_probability]
            for treewidth_computation_result in list_of_results:
                result_tw = treewidth_computation_result.treewidth
                # skip failed computations and out-of-range treewidths
                if result_tw is None:
                    continue
                if result_tw < min_tw or result_tw > max_tw:
                    continue
                undirected_edge_representation = treewidth_computation_result.undirected_graph_edge_representation
                if undirected_edge_representation is None:
                    continue
                graph_storage.add_graph_as_edge_representation(
                    result_tw, undirected_edge_representation)

    logger.info("Writing file {}".format(output_pickle_file))
    with open(output_pickle_file, "wb") as f:
        pickle.dump(graph_storage, f)
Example #3
0
def execute_treewidth_computation_experiment(yaml_parameter_file, threads, timeout, remove_intermediate_solutions):
    """Run the treewidth scenario-generation experiment described by a YAML file.

    Sets up the experiment paths and root logger, then delegates to
    treewidth_computation_experiments.run_experiment_from_yaml. A
    non-positive timeout is interpreted as "no timeout".
    """
    click.echo('Generate Scenarios for evaluation of the treewidth model')

    util.ExperimentPathHandler.initialize()

    # a non-positive timeout disables the timeout entirely
    timeout = None if timeout <= 0 else timeout

    stem = os.path.basename(yaml_parameter_file.name).split(".")[0].lower()
    log_file = os.path.join(
        util.ExperimentPathHandler.LOG_DIR,
        "{}_parent.log".format(stem))
    # per-process output files; {process_index} is filled in by the experiment runner
    output_file = os.path.join(
        util.ExperimentPathHandler.OUTPUT_DIR,
        "{}_results_{{process_index}}.pickle".format(stem))
    util.initialize_root_logger(log_file)
    treewidth_computation_experiments.run_experiment_from_yaml(
        yaml_parameter_file,
        output_file,
        threads,
        timeout,
        remove_intermediate_solutions)
Example #4
0
def _resolve_log_level(config, option_name, default):
    """Resolve a pytest log-level option (CLI wins over ini) to a logging constant.

    :param config: pytest config object (getoption/getini)
    :param option_name: option key, e.g. 'log_level' or 'log_cli_level'
    :param default: value returned when the option is unset/empty
    :raises RuntimeError: if the option names an unknown logging level
    """
    opt = config.getoption(option_name)
    if opt is None:
        opt = config.getini(option_name)
    if not opt:
        return default
    level = getattr(logging, opt.upper(), None)
    if not level:
        raise RuntimeError('Invalid log-level option: {}'.format(opt))
    return level


def _next_free_log_directory(worker_log_file=None):
    """Pick (and create if needed) a timestamped log directory 'log_<ts>_NNNN'.

    With *worker_log_file* (xdist mode) a candidate directory is skipped only
    when it already contains that file, so several workers share the same
    directory; without it, the first non-existing directory is created.
    Returns the chosen directory name.
    """
    counter = 0
    prefix = "log_{}".format(datetime.datetime.now().strftime("%Y_%m_%d_%H"))
    candidate = prefix + "_{:04d}".format(counter)
    if worker_log_file is not None:
        # search for a directory name where the respective log file was not yet created
        while os.path.exists(candidate) and os.path.exists(os.path.join(candidate, worker_log_file)):
            counter += 1
            candidate = prefix + "_{:04d}".format(counter)
        if not os.path.exists(candidate):
            # this should hopefully only be created by the main process
            os.mkdir(candidate)
    else:
        # search for a directory that does not yet exist
        while os.path.exists(candidate):
            counter += 1
            candidate = prefix + "_{:04d}".format(counter)
        os.mkdir(candidate)
    return candidate


def _setup_root_logger(actual_log_file, log_level_cli, log_level_file):
    """Install the root logger and log where output is going."""
    util.initialize_root_logger(actual_log_file, print_level=log_level_cli, file_level=log_level_file,
                                allow_override=True)
    logging.getLogger().info(
        f"Setting up logging in file {actual_log_file} ({log_level_file}) and via cli ({log_level_cli is not None}) ({log_level_cli})")


def pytest_configure(config):
    """pytest hook: configure per-run (and per-xdist-worker) file logging.

    File level defaults to DEBUG; CLI level defaults to disabled (None).
    """
    log_file = config.getoption('log_file')
    if log_file is None:
        log_file = config.getini('log_file')

    log_level_file = _resolve_log_level(config, 'log_level', logging.DEBUG)
    log_level_cli = _resolve_log_level(config, "log_cli_level", None)

    if log_file:
        if 'PYTEST_XDIST_WORKER' in os.environ:
            # parallelization is enabled --> one log file per worker
            worker_string = os.environ.get('PYTEST_XDIST_WORKER', '-1')
            # BUGFIX: int("") raised ValueError when the worker id contained
            # no digits; fall back to -1 in that case.
            digits = "".join([a for a in worker_string if a.isdigit()])
            numeric_worker_string = int(digits) if digits else -1
            if numeric_worker_string > -1:
                sleep(1)        # to force the main process to create the directory

            actual_log_file = log_file[0:-4] + f"_worker_{numeric_worker_string}" + ".log"
            actual_directory = _next_free_log_directory(actual_log_file)
            actual_log_file = os.path.join(actual_directory, actual_log_file)
        else:
            actual_directory = _next_free_log_directory()
            actual_log_file = os.path.join(actual_directory, log_file)
    else:
        # BUGFIX: the original re-tested `log_file` here, but it is always
        # falsy in this branch, so the file target is necessarily None
        # (console-only logging).
        actual_log_file = None

    _setup_root_logger(actual_log_file, log_level_cli, log_level_file)