Example #1
    def test_importingExportingPetri(self):
        # dummy instance assignment to avoid "could be a static method" warnings;
        # unittest requires test cases to be instance methods
        self.dummy_variable = "dummy_value"
        imported_petri1, marking1, fmarking1 = petri_importer.import_net(
            os.path.join(INPUT_DATA_DIR, "running-example.pnml"))
        soundness = check_soundness.check_petri_wfnet_and_soundness(
            imported_petri1)
        del soundness
        petri_exporter.export_net(
            imported_petri1, marking1,
            os.path.join(OUTPUT_DATA_DIR, "running-example.pnml"))
        imported_petri2, marking2, fmarking2 = petri_importer.import_net(
            os.path.join(OUTPUT_DATA_DIR, "running-example.pnml"))
        soundness = check_soundness.check_petri_wfnet_and_soundness(
            imported_petri2)
        del soundness

        self.assertEqual(sorted([x.name for x in imported_petri1.places]),
                         sorted([x.name for x in imported_petri2.places]))
        self.assertEqual(sorted([x.name for x in imported_petri1.transitions]),
                         sorted([x.name for x in imported_petri2.transitions]))
        self.assertEqual(
            sorted(
                [x.source.name + x.target.name for x in imported_petri1.arcs]),
            sorted(
                [x.source.name + x.target.name for x in imported_petri2.arcs]))
        self.assertEqual([x.name for x in marking1],
                         [x.name for x in marking2])
        os.remove(os.path.join(OUTPUT_DATA_DIR, "running-example.pnml"))
Example #2
    def __init__(self, path):
        """
            :param path: path to directory storing process model file

        """
        self.net, self.initial_marking, self.final_marking = pnml_importer.import_net(
            path)
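
A self-contained sketch of how a constructor like this might be used; the class name PetriNetModel and the file path are illustrative assumptions, and only the import_net call is taken from the snippet above.

import os

from pm4py.objects.petri.importer import pnml as pnml_importer


class PetriNetModel:
    def __init__(self, path):
        # load the net plus its initial and final marking from a PNML file
        self.net, self.initial_marking, self.final_marking = pnml_importer.import_net(path)


# hypothetical usage; the path is a placeholder
model = PetriNetModel(os.path.join("tests", "input_data", "running-example.pnml"))
print(len(model.net.places), len(model.net.transitions))
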
Example #3
    def test_importingPetriLogAlignment(self):
        # dummy instance assignment to avoid "could be a static method" warnings;
        # unittest requires test cases to be instance methods
        self.dummy_variable = "dummy_value"
        imported_petri1, marking1, fmarking1 = petri_importer.import_net(
            os.path.join(INPUT_DATA_DIR, "running-example.pnml"))
        soundness = check_soundness.check_petri_wfnet_and_soundness(
            imported_petri1)
        del soundness
        log = xes_importer.import_log(
            os.path.join(INPUT_DATA_DIR, "running-example.xes"))
        final_marking = petri.petrinet.Marking()
        for p in imported_petri1.places:
            if not p.out_arcs:
                final_marking[p] = 1
        for trace in log:
            cf_result = state_equation_a_star.apply(
                trace, imported_petri1, marking1, final_marking)['alignment']
            is_fit = True
            for couple in cf_result:
                if not (couple[0] == couple[1]
                        or couple[0] == ">>" and couple[1] is None):
                    is_fit = False
            if not is_fit:
                raise Exception("should be fit")
Example #4
def execute_script():
    log_path = os.path.join("..", "tests", "input_data", "running-example.xes")
    log = xes_importer.apply(log_path)
    # obtain Petri net through Alpha Miner
    net, initial_marking, final_marking = alpha_miner.apply(log)
    # obtain stochastic information for transitions in the model
    s_map = stochastic_map.get_map_from_log_and_net(
        log,
        net,
        initial_marking,
        final_marking,
        force_distribution="EXPONENTIAL")
    # export the current stochastic Petri net
    petri_exporter.export_net(net,
                              initial_marking,
                              "example.pnml",
                              final_marking=final_marking,
                              stochastic_map=s_map)
    # re-import the current stochastic Petri net from file
    net, initial_marking, final_marking, s_map = petri_importer.import_net(
        "example.pnml", return_stochastic_information=True)
    # remove temporary file
    os.remove("example.pnml")
    # get the reachability graph from the Petri net
    reachab_graph = construct_reachability_graph(net, initial_marking)
    # get the tangible reachability graph from the reachability graph and the stochastic map
    tang_reach_graph = tangible_reachability.get_tangible_reachability_from_reachability(
        reachab_graph, s_map)
    # visualize the tangible reachability graph on the screen
    viz = ts_vis_factory.apply(tang_reach_graph,
                               parameters={
                                   "format": "svg",
                                   "show_labels": True,
                                   "show_names": True
                               })
    ts_vis_factory.view(viz)
    # get the Q matrix assuming exponential distributions
    q_matrix = ctmc.get_q_matrix_from_tangible_exponential(
        tang_reach_graph, s_map)
    # pick a state to start from
    states = sorted(list(tang_reach_graph.states), key=lambda x: x.name)
    state = states[0]
    print("\n\nstarting from state = ", state.name)
    # do transient analysis after 1 day
    transient_result = ctmc.transient_analysis_from_tangible_q_matrix_and_single_state(
        tang_reach_graph, q_matrix, state, 86400)
    print("\nprobability for each state after 1 day = ", transient_result)
    # do transient analysis after 10 days
    transient_result = ctmc.transient_analysis_from_tangible_q_matrix_and_single_state(
        tang_reach_graph, q_matrix, state, 864000)
    print("\nprobability for each state after 10 days = ", transient_result)
    # do transient analysis after 100 days
    transient_result = ctmc.transient_analysis_from_tangible_q_matrix_and_single_state(
        tang_reach_graph, q_matrix, state, 8640000)
    print("\nprobability for each state after 100 days = ", transient_result)
    steady_state = ctmc.steadystate_analysis_from_tangible_q_matrix(
        tang_reach_graph, q_matrix)
    print("\nsteady state = ", steady_state)
Example #5
    def test_importingPetriLogTokenReplay(self):
        # dummy instance assignment to avoid "could be a static method" warnings;
        # unittest requires test cases to be instance methods
        self.dummy_variable = "dummy_value"
        imported_petri1, marking1, fmarking1 = petri_importer.import_net(
            os.path.join(INPUT_DATA_DIR, "running-example.pnml"))
        trace_log = xes_importer.import_log(
            os.path.join(INPUT_DATA_DIR, "running-example.xes"))
        aligned_traces = token_replay.apply_log(trace_log, imported_petri1,
                                                marking1, fmarking1)
        del aligned_traces
Example #6
    def test_importingExportingStochasticNet(self):
        # dummy instance assignment to avoid "could be a static method" warnings;
        # unittest requires test cases to be instance methods
        self.dummy_variable = "dummy_value"
        # return_stochastic_information=True is needed to obtain the fourth
        # return value (see Example #4)
        imported_petri1, marking1, fmarking1, stochastic_info1 = petri_importer.import_net(
            os.path.join(INPUT_DATA_DIR, "stochastic_running_example.pnml"),
            return_stochastic_information=True)
        petri_exporter.export_net(imported_petri1, marking1,
                                  os.path.join(OUTPUT_DATA_DIR, "stochastic_running_example.pnml"),
                                  final_marking=fmarking1)
        os.remove(os.path.join(OUTPUT_DATA_DIR, "stochastic_running_example.pnml"))
Example #7
def execute_script():
    log_path = os.path.join("..", "tests", "input_data", "running-example.xes")
    pnml_path = os.path.join("..", "tests", "input_data",
                             "running-example.pnml")

    # log_path = 'C:/Users/bas/Documents/tue/svn/private/logs/a32_logs/a32f0n05.xes'
    # pnml_path = 'C:/Users/bas/Documents/tue/svn/private/logs/a32_logs/a32.pnml'

    log = xes_importer.import_log(log_path)
    net, marking, fmarking = import_net(pnml_path)

    model_cost_function = dict()
    sync_cost_function = dict()
    for t in net.transitions:
        if t.label is not None:
            model_cost_function[t] = 1000
            sync_cost_function[t] = 0
        else:
            model_cost_function[t] = 1

    alignments = ali.factory.apply(log, net, marking, fmarking)
    print(alignments)
    pretty_print_alignments(alignments)
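
In the snippet above, model_cost_function and sync_cost_function are built but never passed to the alignment call. A hedged sketch of how they could be supplied, assuming the parameter constants exposed by the state_equation_a_star variant in older pm4py releases; check the names against the installed version.

# assumption: these parameter constants exist in the installed pm4py release
from pm4py.algo.conformance.alignments.versions import state_equation_a_star

parameters = {
    state_equation_a_star.PARAM_MODEL_COST_FUNCTION: model_cost_function,
    state_equation_a_star.PARAM_SYNC_COST_FUNCTION: sync_cost_function,
}
alignments = ali.factory.apply(log, net, marking, fmarking, parameters=parameters)
pretty_print_alignments(alignments)
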
Example #8
if __name__ == '__main__':
    logger.info("Start script...")
    start_all = time.time()

    configs_df = experiment_configs2df(EXPERIMENT_CONFIGS)
    logger.info(f"Experiment configuration: \n{configs_df}")

    results_dirname = get_results_dirname(EXPERIMENT_CONFIGS)
    results_dir = os.path.join(RESULT_DIR, results_dirname)
    os.makedirs(results_dir)

    time_dict = dict()

    logger.info("Importing data...")
    start_import = time.time()
    net, init_marking, final_marking = pnml_importer.import_net(MODEL_FP)
    net_orig, init_marking_orig, final_marking_orig = pnml_importer.import_net(
        MODEL_FP)
    event_df = pd.read_csv(DATA_FP)
    took_import = time.time() - start_import
    time_dict[TIME_IMPORT_DATA] = took_import
    logger.info(f"Importing data took: {took_import:.3f}s")

    logger.info(f"Event df shape: {event_df.shape}")

    logger.info('Mapping activity to integer labels...')
    obs2int = event_df[[ACTIVITY, ACTIVITY_ID]].set_index(ACTIVITY)
    obs2int = obs2int.to_dict()[ACTIVITY_ID]
    int2obs = {val: key for key, val in obs2int.items()}  # invert: integer label -> activity
    obs2int_df = pd.DataFrame(list(obs2int.items()),
                              columns=['activity', 'activity_int'])
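
A short added illustration of the two mappings built above; it reuses event_df and the ACTIVITY constant from earlier in the snippet.

# hypothetical sanity check: encode the first few activities and decode them back
encoded = [obs2int[a] for a in event_df[ACTIVITY].head()]
decoded = [int2obs[i] for i in encoded]
assert decoded == list(event_df[ACTIVITY].head())
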
Example #9
import os
from pm4py.objects.petri.importer import pnml as pnml_importer

net, initial_marking, final_marking = pnml_importer.import_net(
    os.path.join("tests", "input_data", "new net.pnml"))

from pm4py.visualization.petrinet import factory as pn_vis_factory

gviz = pn_vis_factory.apply(net, initial_marking, final_marking)
pn_vis_factory.view(gviz)

from pm4py.objects.petri import semantics

transitions = semantics.enabled_transitions(net, initial_marking)
print(transitions)
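
A brief follow-up sketch that fires one of the enabled transitions; it assumes the classic semantics.execute(transition, net, marking) helper from the same pm4py generation as the imports above.

# fire an arbitrary enabled transition and print the marking it leads to
if transitions:
    t = list(transitions)[0]
    new_marking = semantics.execute(t, net, initial_marking)
    print(new_marking)
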
Example #10
File: example.py Project: iliam/PyDREAM
# imports added so the snippet runs on its own; the pm4py module paths are
# assumptions based on the pm4py 1.x layout used elsewhere on this page
import json

from pm4py.objects.log.importer.xes import factory as xes_import_factory
from pm4py.objects.petri.importer import pnml as pnml_importer
from pm4py.objects.petri.exporter import pnml as pnml_exporter
from pm4py.algo.discovery.heuristics import factory as heuristics_miner

from pydream.LogWrapper import LogWrapper
from pydream.EnhancedPN import EnhancedPN
from pydream.predictive.nap.NAP import NAP
from pydream.util.TimedStateSamples import loadTimedStateSamples

if __name__ == "__main__":
    log = xes_import_factory.apply('YOUR_EVENTLOG.xes')

    net, im, fm = heuristics_miner.apply(
        log, parameters={"dependency_thresh": 0.99})
    pnml_exporter.export_net(net, im, "discovered_pn.pnml")

    net, initial_marking, final_marking = pnml_importer.import_net(
        "discovered_pn.pnml")

    log_wrapper = LogWrapper(log)
    enhanced_pn = EnhancedPN(net, initial_marking)
    enhanced_pn.enhance(log_wrapper)
    enhanced_pn.saveToFile("enhanced_discovered_pn.json")

    enhanced_pn = EnhancedPN(net,
                             initial_marking,
                             decay_function_file="enhanced_discovered_pn.json")
    tss_json, tss_objs = enhanced_pn.decay_replay(log_wrapper=log_wrapper)

    with open("timedstatesamples.json", 'w') as fp:
        json.dump(tss_json, fp)

    algo = NAP(tss_train_file="timedstatesamples.json",
Example #11
def cpn_to_petrinet(input_file_path):
    '''
    Convert a .cpn file into a Petri net.

    Parameters
    ----------
    input_file_path
        Input file path (must be a .cpn file)

    Returns
    -------
    net, initial_marking, final_marking
        The Petri net obtained via the pm4py PNML importer
    '''

    doc = ET.parse(input_file_path)
    root = doc.getroot() #load cpn file (xml file)

    for child in root.iter("generator"):
        root.remove(child)

    root.tag = "pnml" #change tag to pnml

    n = 0
    for i in range(len(root[0])):
        tag = root[0][n].tag
        if tag == "page":
            n += 1
        else:
            root[0].remove(root[0][n]) #remove tags that are of no use

    root[0].set("id", "net1")
    root[0].set("type", "http://www.pnml.org/version-2009/grammar/pnmlcoremodel") #set pnml type

    for place in root.iter("place"):
        n = 0
        l = len(place)
        for i in range(l):
            tag = place[n].tag
            if tag == "text":
                n += 1
            else:
                place.remove(place[n]) # remove place's child tags that are of no use

    for trans in root.iter("trans"):
        n = 0
        l = len(trans)
        for i in range(l):
            tag = trans[n].tag
            if tag == "text":
                n += 1
            else:
                trans.remove(trans[n]) # remove trans's child tags that are of no use
        trans.tag = "transition"


    for arc in root.iter("arc"):
        n = 0
        l = len(arc)
        for i in range(l):
            tag = arc[n].tag
            if tag == "transend" or tag == "placeend":
                n += 1
            else:
                arc.remove(arc[n]) #remove arc's child tags that are of no use

    for arc in root.iter("arc"):
        if arc.attrib['orientation'] == 'PtoT':
            arc.set("source", arc.find('placeend').attrib['idref'])
            arc.set("target", arc.find('transend').attrib['idref'])
        else:
            arc.set("source", arc.find('transend').attrib['idref'])
            arc.set("target", arc.find('placeend').attrib['idref']) #change arc into right pnml arc

    for text in root.iter("text"):
        text.tag = "name"


    for name in root.iter("name"):
        text = ET.SubElement(name, "text")
        text.text = name.text
        name.text = None

    out_file = input_file_path[:-3]
    out_file += "pnml"
    doc.write(out_file, encoding="utf-8", xml_declaration=True)
    net, initial_marking, final_marking = pnml_importer.import_net(out_file) #get petrinet object

    return net, initial_marking, final_marking
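
The function above relies on two imports that the snippet does not show; the assumed imports and a usage sketch with a placeholder file name follow.

# assumed imports for the snippet: the standard-library ElementTree module and
# the pm4py PNML importer used elsewhere on this page
import xml.etree.ElementTree as ET
from pm4py.objects.petri.importer import pnml as pnml_importer

# hypothetical usage; "model.cpn" is a placeholder path
# net, initial_marking, final_marking = cpn_to_petrinet("model.cpn")
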
Example #12
def import_net(net_fname):
    net_fp = os.path.join(MODEL_DIR, net_fname)
    net, init_marking, final_marking = pnml_importer.import_net(net_fp)
    return net, init_marking, final_marking
Example #13
def main(system, miner):
    if DATA_PATH is None:
        log = xes_importer.import_log(
            os.path.join(WORK_PATH, "data", "variants",
                         str(system) + "_train.xes"))
    else:
        log = xes_importer.import_log(
            os.path.join(DATA_PATH, "variants",
                         str(system) + "_train.xes"))

    bestmodel = None
    bestfit = None
    bestPrec = None
    bestGen = 0
    bestfittraces = 0

    gen_bestmodel = None
    gen_bestfit = None
    gen_bestPrec = None
    gen_bestGen = 0

    if DATA_PATH is None:
        pn_files = os.listdir(os.path.join(WORK_PATH, "data", "pns", str(system)))
    else:
        pn_files = os.listdir(os.path.join(DATA_PATH, "pns", str(system)))

    for file in pn_files:
        if system in file and miner in file:
            if DATA_PATH is None:
                path = os.path.join(WORK_PATH, "data", "pns", str(system),
                                    file)
            else:
                path = os.path.join(DATA_PATH, "pns", str(system), file)

            print("Checking conformance of file:", path)

            net, initial_marking, final_marking = pnml_importer.import_net(
                path)

            fitness = replay_factory.apply(log, net, initial_marking,
                                           final_marking)
            precision = precision_factory.apply(log, net, initial_marking,
                                                final_marking)
            generalization = generalization_factory.apply(
                log, net, initial_marking, final_marking)

            if fitness['perc_fit_traces'] > bestfittraces:
                bestfittraces = fitness['perc_fit_traces']
                bestmodel = path
                bestfit = fitness
                bestPrec = precision
                bestGen = generalization

            elif generalization > bestGen and fitness[
                    'perc_fit_traces'] == bestfittraces:
                bestmodel = path
                bestfit = fitness
                bestPrec = precision
                bestGen = generalization

            if generalization > gen_bestGen:
                gen_bestmodel = path
                gen_bestfit = fitness
                gen_bestPrec = precision
                gen_bestGen = generalization

    net, initial_marking, final_marking = pnml_importer.import_net(
        gen_bestmodel)
    try:
        align_fitness = replay_factory.apply(log,
                                             net,
                                             initial_marking,
                                             final_marking,
                                             variant="alignments")
    except Exception:
        align_fitness = {"averageFitness": "N/A"}
    try:
        align_precision = precision_factory.apply(
            log,
            net,
            initial_marking,
            final_marking,
            variant="align_etconformance")
    except Exception:
        align_precision = "N/A"
    print("")
    print("")
    print(
        "*********** Petri net w/ highest generalization *************** "
    )
    print("Petri net file:", gen_bestmodel)
    print("Token-based Fitness=", gen_bestfit['average_trace_fitness'])
    print("Token-based Precision=", gen_bestPrec)
    print("Alignment-based Fitness=", align_fitness['averageFitness'])
    print("Alignment-based Precision=", align_precision)
    print("Generalization=", gen_bestGen)

    net, initial_marking, final_marking = pnml_importer.import_net(bestmodel)
    try:
        align_fitness = replay_factory.apply(log,
                                             net,
                                             initial_marking,
                                             final_marking,
                                             variant="alignments")
    except Exception:
        align_fitness = {"averageFitness": "N/A"}

    try:
        align_precision = precision_factory.apply(
            log,
            net,
            initial_marking,
            final_marking,
            variant="align_etconformance")
    except Exception:
        align_precision = "N/A"
    print("")
    print(
        "*********** Petri net w/ highest ratio of fitting traces and high generalization *************** "
    )
    print("Petri net file:", bestmodel)
    print("Token-based Fitness=", bestfit['average_trace_fitness'])
    print("Token-based Precision=", bestPrec)
    print("Alignment-based Fitness=", align_fitness['averageFitness'])
    print("Alignment-based Precision=", align_precision)
    print("Generalization=", bestGen)
Example #14
        f_out = os.path.join(DATA_PATH, "variants", pn + ".txt")
        f_pn = os.path.join(DATA_PATH, "pns", system, pn)

    seq_len = getMaxVariantLength(f_pop)
    n_decimal = 8

    if eval_only:
        print("*** Variant Evaluation of " + system + " using " + pn + " ***")
    else:
        print("*** Playout Variants of " + system + " and Evaluation using " +
              pn + " ***")

    print("Maximum Variant Length is:", str(seq_len))

    if not eval_only:
        net, initial_marking, final_marking = pnml_importer.import_net(f_pn)
        out = playout.apply(net,
                            initial_marking,
                            parameters={
                                "noTraces": n_traces,
                                "maxTraceLength": seq_len - 1
                            })
        writeToFile(f_out, out)

    train = readVariantFile(f_train, unique=True)
    test = readVariantFile(f_test, unique=True)
    pop = readVariantFile(f_pop, unique=True)
    gen = readVariantFile(f_out, unique=True)

    new_train = []
    for i in train: