Example #1
def apply(log, parameters):
    """
    Apply the IM_F algorithm to a log, obtaining a Petri net along with an initial and final marking

    Parameters
    -----------
    log
        Log
    parameters
        Parameters of the algorithm, including:
            Parameters.ACTIVITY_KEY -> attribute of the log to use as activity name
            (default concept:name)

    Returns
    -----------
    net
        Petri net
    initial_marking
        Initial marking
    final_marking
        Final marking
    """

    if pkgutil.find_loader("pandas"):
        import pandas as pd
        from pm4py.statistics.variants.pandas import get as variants_get

        if type(log) is pd.DataFrame:
            variants = variants_get.get_variants_count(log, parameters=parameters)
            return apply_variants(variants, parameters=parameters)

    log = converter.apply(log, parameters=parameters)
    net, initial_marking, final_marking = tree_to_petri.apply(apply_tree(log, parameters))
    return net, initial_marking, final_marking
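
A minimal usage sketch for this entry point is shown below; the import paths are assumptions based on a pm4py 2.x-style package layout (the snippet itself only defines apply), so they may differ between versions.

# Hypothetical usage sketch; the import paths are assumptions, not taken from the snippet above.
from pm4py.objects.log.importer.xes import importer as xes_importer
from pm4py.algo.discovery.inductive.variants.im_f import algorithm as im_f_algo

log = xes_importer.apply("running-example.xes")  # any XES event log

# apply() also accepts a pandas DataFrame (see the pd.DataFrame branch above)
net, initial_marking, final_marking = im_f_algo.apply(log, parameters={})
print(net, initial_marking, final_marking)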
Example #2
def filter_variants_top_k(log, k, parameters=None):
    """
    Keeps the top-k variants of the log

    Parameters
    -------------
    log
        Event log
    k
        Number of variants that should be kept
    parameters
        Parameters

    Returns
    -------------
    filtered_log
        Filtered log
    """
    if parameters is None:
        parameters = {}

    variants = variants_get.get_variants_count(log, parameters=parameters)
    variant_count = []
    for variant in variants:
        variant_count.append([variant, variants[variant]])
    variant_count = sorted(variant_count,
                           key=lambda x: (x[1], x[0]),
                           reverse=True)
    variant_count = variant_count[:min(k, len(variant_count))]
    variants_to_filter = [x[0] for x in variant_count]

    return apply(log, variants_to_filter, parameters=parameters)
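
The top-k selection itself can be illustrated with a toy variants dictionary; the sketch below only mirrors the sorting and slicing performed above and does not depend on pm4py.

# Self-contained sketch of the top-k selection used above (no pm4py involved).
variants = {"a,b,c": 50, "a,c,b": 30, "a,b,b,c": 30, "a,c": 5}
k = 2

# sort by count (descending), break ties on the variant string, then keep the first k
variant_count = sorted(variants.items(), key=lambda x: (x[1], x[0]), reverse=True)
variants_to_keep = [variant for variant, _ in variant_count[:min(k, len(variant_count))]]

print(variants_to_keep)  # ['a,b,c', 'a,c,b'] -- the tie between the two 30-counts is broken by the variant string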
Example #3
def apply(log, parameters=None):
    """
    Apply the IM algorithm to a log, obtaining a Petri net along with an initial and final marking

    Parameters
    -----------
    log
        Log
    parameters
        Parameters of the algorithm, including:
            Parameters.ACTIVITY_KEY -> attribute of the log to use as activity name
            (default concept:name)

    Returns
    -----------
    net
        Petri net
    initial_marking
        Initial marking
    final_marking
        Final marking
    """
    if type(log) is pd.DataFrame:
        variants = variants_get.get_variants_count(log, parameters=parameters)
        return apply_variants(variants, parameters=parameters)
    else:
        log = converter.apply(log, parameters=parameters)
        net, initial_marking, final_marking = tree_to_petri.apply(
            apply_tree(log, parameters))
        return net, initial_marking, final_marking
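
For the DataFrame branch, a sketch with pm4py's conventional column names (case:concept:name, concept:name, time:timestamp) is given below; the import path of the module is an assumption based on a pm4py 2.x-style layout.

# Hypothetical usage sketch for the DataFrame branch; the import path is an assumption.
import pandas as pd
from pm4py.algo.discovery.inductive.variants.im import algorithm as im_algo

df = pd.DataFrame({
    "case:concept:name": ["1", "1", "1", "2", "2"],
    "concept:name": ["register", "check", "pay", "register", "pay"],
    "time:timestamp": pd.to_datetime([
        "2021-01-01 08:00", "2021-01-01 09:00", "2021-01-01 10:00",
        "2021-01-02 08:00", "2021-01-02 09:00",
    ]),
})

net, initial_marking, final_marking = im_algo.apply(df)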
Example #4
def filter_variants_by_coverage_percentage(log,
                                           min_coverage_percentage,
                                           parameters=None):
    """
    Filters the variants of the log by a coverage percentage
    (e.g., if min_coverage_percentage=0.4 and we have a log with 1000 cases,
    of which 500 belong to variant 1, 400 to variant 2, and 100 to variant 3,
    the filter keeps only the traces of variant 1 and variant 2).

    Parameters
    ---------------
    log
        Event log
    min_coverage_percentage
        Minimum allowed percentage of coverage
    parameters
        Parameters

    Returns
    ---------------
    filtered_log
        Filtered log
    """
    if parameters is None:
        parameters = {}

    case_id_glue = exec_utils.get_param_value(Parameters.CASE_ID_KEY,
                                              parameters, CASE_CONCEPT_NAME)

    variants = variants_get.get_variants_count(log, parameters=parameters)
    allowed_variants = [
        x for x, y in variants.items()
        if y >= min_coverage_percentage * log[case_id_glue].nunique()
    ]

    return apply(log, allowed_variants, parameters=parameters)
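
The coverage rule can be checked against the docstring's own numbers with a small self-contained sketch (no pm4py involved).

# Self-contained sketch of the coverage rule described in the docstring:
# 1000 cases, min_coverage_percentage = 0.4, so the cut-off is 400 cases.
variants = {"variant 1": 500, "variant 2": 400, "variant 3": 100}
total_cases = sum(variants.values())  # 1000, matching the docstring's example
min_coverage_percentage = 0.4

allowed_variants = [v for v, count in variants.items()
                    if count >= min_coverage_percentage * total_cases]

print(allowed_variants)  # ['variant 1', 'variant 2'] -- 500 and 400 are >= 400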
Example #5
def apply_tree(log, parameters):
    """
    Apply the IM_FF algorithm to a log, obtaining a process tree

    Parameters
    ----------
    log
        Log
    parameters
        Parameters of the algorithm, including:
            Parameters.ACTIVITY_KEY -> attribute of the log to use as activity name
            (default concept:name)

    Returns
    ----------
    process_tree
        Process tree
    """
    if parameters is None:
        parameters = {}

    if pkgutil.find_loader("pandas"):
        import pandas as pd
        from pm4py.statistics.variants.pandas import get as variants_get

        if type(log) is pd.DataFrame:
            variants = variants_get.get_variants_count(log, parameters=parameters)
            return apply_tree_variants(variants, parameters=parameters)

    activity_key = exec_utils.get_param_value(Parameters.ACTIVITY_KEY, parameters,
                                              pmutil.xes_constants.DEFAULT_NAME_KEY)

    log = converter.apply(log, parameters=parameters)
    # keep only the activity attribute (since the others are not used)
    log = filtering_utils.keep_only_one_attribute_per_event(log, activity_key)

    noise_threshold = exec_utils.get_param_value(Parameters.NOISE_THRESHOLD, parameters,
                                                 shared_constants.NOISE_THRESHOLD_IMF)

    dfg = [(k, v) for k, v in dfg_inst.apply(log, parameters=parameters).items() if v > 0]
    c = Counts()
    activities = attributes_get.get_attribute_values(log, activity_key)
    start_activities = list(start_activities_get.get_start_activities(log, parameters=parameters).keys())
    end_activities = list(end_activities_get.get_end_activities(log, parameters=parameters).keys())
    contains_empty_traces = False
    traces_length = [len(trace) for trace in log]
    if traces_length:
        contains_empty_traces = min(traces_length) == 0

    # set the threshold based on the noise threshold (f) and the maximum frequency in the DFG:
    max_value = 0
    for key, value in dfg:
        if value > max_value:
            max_value = value
    threshold = noise_threshold * max_value

    recursion_depth = 0
    sub = subtree.make_tree(log, dfg, dfg, dfg, activities, c, recursion_depth, noise_threshold, threshold,
                            start_activities, end_activities,
                            start_activities, end_activities, parameters=parameters)

    process_tree = get_tree_repr_implain.get_repr(sub, 0, contains_empty_traces=contains_empty_traces)
    # Ensures consistency of the parent pointers in the process tree
    tree_consistency.fix_parent_pointers(process_tree)
    # Fixes a one-child XOR that is added when single-activity flowers are found
    tree_consistency.fix_one_child_xor_flower(process_tree)
    # Folds the process tree (to simplify it in case fall-throughs/filtering are applied)
    process_tree = util.fold(process_tree)

    return process_tree
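
Since this entry point returns a process tree rather than a Petri net, the sketch below discovers the tree and then converts it, as the apply() in Example #1 does via tree_to_petri; the import paths are assumptions based on a pm4py 2.x-style layout.

# Hypothetical usage sketch; both import paths are assumptions and may differ between versions.
from pm4py.objects.log.importer.xes import importer as xes_importer
from pm4py.algo.discovery.inductive.variants.im_f import algorithm as im_f_algo
from pm4py.objects.conversion.process_tree import converter as pt_converter

log = xes_importer.apply("running-example.xes")

tree = im_f_algo.apply_tree(log, parameters={})
print(tree)  # textual representation of the discovered process tree

# the tree can then be turned into a Petri net with initial and final marking
net, initial_marking, final_marking = pt_converter.apply(tree)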
Example #6
def apply_tree(log, parameters=None):
    """
    Apply the IM algorithm to a log, obtaining a process tree

    Parameters
    ----------
    log
        Log
    parameters
        Parameters of the algorithm, including:
            Parameters.ACTIVITY_KEY -> attribute of the log to use as activity name
            (default concept:name)

    Returns
    ----------
    process_tree
        Process tree
    """
    if parameters is None:
        parameters = {}

    if type(log) is pd.DataFrame:
        variants = variants_get.get_variants_count(log, parameters=parameters)
        return apply_tree_variants(variants, parameters=parameters)
    else:
        activity_key = exec_utils.get_param_value(
            Parameters.ACTIVITY_KEY, parameters,
            pmutil.xes_constants.DEFAULT_NAME_KEY)

        log = converter.apply(log, parameters=parameters)
        # since the basic IM is influenced by each variant only once, it makes sense to keep a single trace per variant
        log = filtering_utils.keep_one_trace_per_variant(log,
                                                         parameters=parameters)
        # keep only the activity attribute (since the others are not used)
        log = filtering_utils.keep_only_one_attribute_per_event(
            log, activity_key)

        dfg = [(k, v)
               for k, v in dfg_inst.apply(log, parameters=parameters).items()
               if v > 0]
        c = Counts()
        activities = attributes_filter.get_attribute_values(log, activity_key)
        start_activities = list(
            start_activities_filter.get_start_activities(
                log, parameters=parameters).keys())
        end_activities = list(
            end_activities_filter.get_end_activities(
                log, parameters=parameters).keys())
        contains_empty_traces = False
        traces_length = [len(trace) for trace in log]
        if traces_length:
            contains_empty_traces = min(traces_length) == 0

        recursion_depth = 0
        sub = subtree.make_tree(log, dfg, dfg, dfg, activities, c,
                                recursion_depth, 0.0, start_activities,
                                end_activities, start_activities,
                                end_activities, parameters)

        process_tree = get_tree_repr_implain.get_repr(
            sub, 0, contains_empty_traces=contains_empty_traces)
        # Ensures consistency of the parent pointers in the process tree
        tree_consistency.fix_parent_pointers(process_tree)
        # Fixes a one-child XOR that is added when single-activity flowers are found
        tree_consistency.fix_one_child_xor_flower(process_tree)
        # Folds the process tree (to simplify it in case fall-throughs/filtering are applied)
        process_tree = util.fold(process_tree)

        return process_tree
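
The basic IM above hard-codes a threshold of 0.0 in the subtree.make_tree call, whereas the IM_F tree in Example #5 reads Parameters.NOISE_THRESHOLD. The sketch below shows how such a parameter could be supplied, assuming the Parameters enum is exposed by the same algorithm module (an assumption, since the snippets only reference it).

# Hypothetical sketch of supplying parameters; the module path and the location
# of the Parameters enum are assumptions.
from pm4py.objects.log.importer.xes import importer as xes_importer
from pm4py.algo.discovery.inductive.variants.im_f import algorithm as im_f_algo

log = xes_importer.apply("running-example.xes")

parameters = {
    im_f_algo.Parameters.ACTIVITY_KEY: "concept:name",  # default per the docstrings above
    im_f_algo.Parameters.NOISE_THRESHOLD: 0.2,          # overrides the IM_F default threshold
}
tree = im_f_algo.apply_tree(log, parameters)
print(tree)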