def test_applyAlphaMinerToCSV(self):
    # to avoid static method warnings in tests,
    # that by construction of the unittest package have to be expressed in such way
    self.dummy_variable = "dummy_value"
    # Discover two Petri nets from the same CSV log and verify that the two
    # miner instances produce structurally equivalent results.
    csv_path = os.path.join(INPUT_DATA_DIR, "running-example.csv")
    log1, net1, marking1, fmarking1 = self.obtainPetriNetThroughAlphaMiner(csv_path)
    log2, net2, marking2, fmarking2 = self.obtainPetriNetThroughAlphaMiner(csv_path)
    # identical preprocessing pipeline applied to both logs:
    # sort by timestamp, sample, then index the traces
    log1 = index_attribute.insert_trace_index_as_event_attribute(
        sampling.sample(sorting.sort_timestamp(log1)))
    log2 = index_attribute.insert_trace_index_as_event_attribute(
        sampling.sample(sorting.sort_timestamp(log2)))
    # exercise the PNML exporter on the first net, then clean up the artifact
    pnml_path = os.path.join(OUTPUT_DATA_DIR, "running-example.pnml")
    petri_exporter.apply(net1, marking1, pnml_path)
    os.remove(pnml_path)
    # both discoveries must agree on the size of the resulting net
    self.assertEqual(len(net1.places), len(net2.places))
    self.assertEqual(len(net1.transitions), len(net2.transitions))
    self.assertEqual(len(net1.arcs), len(net2.arcs))
    # places without outgoing arcs are sinks: mark each with one token
    final_marking = petri.petrinet.Marking()
    for place in net1.places:
        if not place.out_arcs:
            final_marking[place] = 1
    aligned_traces = token_replay.apply(log1, net1, marking1, final_marking)
    self.assertEqual(aligned_traces, aligned_traces)
def test_importingExportingPetri(self):
    # to avoid static method warnings in tests,
    # that by construction of the unittest package have to be expressed in such way
    self.dummy_variable = "dummy_value"
    # Round-trip a PNML file: import, export, re-import, and verify that the
    # two imported nets carry the same places, transitions, arcs and marking.
    imported_petri1, marking1, fmarking1 = petri_importer.apply(
        os.path.join(INPUT_DATA_DIR, "running-example.pnml"))
    soundness = check_soundness.check_petri_wfnet_and_soundness(imported_petri1)
    del soundness
    exported_path = os.path.join(OUTPUT_DATA_DIR, "running-example.pnml")
    petri_exporter.apply(imported_petri1, marking1, exported_path)
    imported_petri2, marking2, fmarking2 = petri_importer.apply(exported_path)
    soundness = check_soundness.check_petri_wfnet_and_soundness(imported_petri2)
    del soundness

    def sorted_names(elements):
        # element names sorted so the comparison ignores set ordering
        return sorted(x.name for x in elements)

    self.assertEqual(sorted_names(imported_petri1.places),
                     sorted_names(imported_petri2.places))
    self.assertEqual(sorted_names(imported_petri1.transitions),
                     sorted_names(imported_petri2.transitions))
    # arcs are compared via the concatenated source/target names
    self.assertEqual(
        sorted(a.source.name + a.target.name for a in imported_petri1.arcs),
        sorted(a.source.name + a.target.name for a in imported_petri2.arcs))
    self.assertEqual([x.name for x in marking1], [x.name for x in marking2])
    os.remove(exported_path)
def test_51(self):
    # Smoke test: import a PNML, visualize it, export it (with and without a
    # final marking), inspect enabled transitions, and walk the net structure.
    import os
    from pm4py.objects.petri.importer import importer as pnml_importer
    net, initial_marking, final_marking = pnml_importer.apply(
        os.path.join("input_data", "running-example.pnml"))
    from pm4py.visualization.petrinet import visualizer as pn_visualizer
    gviz = pn_visualizer.apply(net, initial_marking, final_marking)
    from pm4py.objects.petri.exporter import exporter as pnml_exporter
    pnml_exporter.apply(net, initial_marking, "petri.pnml")
    pnml_exporter.apply(net, initial_marking, "petri_final.pnml",
                        final_marking=final_marking)
    os.remove("petri.pnml")
    os.remove("petri_final.pnml")
    from pm4py.objects.petri import semantics
    # fix: the original bound this result to `transitions` and immediately
    # overwrote it with net.transitions, making the call useless
    enabled_transitions = semantics.enabled_transitions(net, initial_marking)
    places = net.places
    transitions = net.transitions
    arcs = net.arcs
    for place in places:
        stru = "\nPLACE: " + place.name
        for arc in place.in_arcs:
            # fix: accumulate the arc descriptions; the original reassigned
            # stru on every arc, discarding the header and all but the last arc
            stru += " " + str(arc.source.name) + " " + str(arc.source.label)
def write_petri_net(petri_net: PetriNet, initial_marking: Marking, final_marking: Marking, file_path: str) -> None:
    """
    Exports a (composite) Petri net object

    Deprecated: use ``write_pnml`` instead.

    Parameters
    ------------
    petri_net
        Petri net
    initial_marking
        Initial marking
    final_marking
        Final marking
    file_path
        Destination path

    Returns
    ------------
    void
    """
    # fix: the docstring must be the first statement of the function body to
    # be picked up by help()/__doc__ (PEP 257); the original emitted the
    # warning first, turning the docstring into a no-op string statement.
    # stacklevel=2 makes the warning point at the caller, not this wrapper.
    warnings.warn('write_petri_net is deprecated, please use write_pnml',
                  DeprecationWarning, stacklevel=2)
    from pm4py.objects.petri.exporter import exporter as petri_exporter
    petri_exporter.apply(petri_net, initial_marking, file_path,
                         final_marking=final_marking)
def write_pnml(petri_net: PetriNet, initial_marking: Marking, final_marking: Marking, file_path: str) -> None:
    """
    Write a (composite) Petri net, with its initial and final marking,
    to a PNML file.

    Parameters
    ------------
    petri_net
        Petri net
    initial_marking
        Initial marking
    final_marking
        Final marking
    file_path
        Destination path

    Returns
    ------------
    void
    """
    # deferred import, following the module's existing convention
    from pm4py.objects.petri.exporter import exporter as petri_exporter
    petri_exporter.apply(petri_net, initial_marking, file_path,
                         final_marking=final_marking)
def test_52(self):
    # Build a tiny Petri net by hand (source -> t_1 -> p_1 -> t_2 -> sink),
    # export it to PNML, render it to SVG, and clean up both artifacts.
    # creating an empty Petri net
    from pm4py.objects.petri.petrinet import PetriNet, Marking
    net = PetriNet("new_petri_net")
    # creating source, p_1 and sink place
    source = PetriNet.Place("source")
    sink = PetriNet.Place("sink")
    p_1 = PetriNet.Place("p_1")
    # add the places to the Petri Net
    net.places.add(source)
    net.places.add(sink)
    net.places.add(p_1)
    # Create transitions
    t_1 = PetriNet.Transition("name_1", "label_1")
    t_2 = PetriNet.Transition("name_2", "label_2")
    # Add the transitions to the Petri Net
    net.transitions.add(t_1)
    net.transitions.add(t_2)
    # Add arcs
    from pm4py.objects.petri import utils
    utils.add_arc_from_to(source, t_1, net)
    utils.add_arc_from_to(t_1, p_1, net)
    utils.add_arc_from_to(p_1, t_2, net)
    utils.add_arc_from_to(t_2, sink, net)
    # Adding tokens
    initial_marking = Marking()
    initial_marking[source] = 1
    final_marking = Marking()
    final_marking[sink] = 1
    from pm4py.objects.petri.exporter import exporter as pnml_exporter
    pnml_exporter.apply(net, initial_marking, "createdPetriNet1.pnml",
                        final_marking=final_marking)
    from pm4py.visualization.petrinet import visualizer as pn_visualizer
    # fix: the original built this visualization three times (twice with the
    # exact same SVG parameters and once without) and only saved the last
    # one; compute the saved SVG rendering once
    parameters = {
        pn_visualizer.Variants.WO_DECORATION.value.Parameters.FORMAT: "svg"
    }
    gviz = pn_visualizer.apply(net, initial_marking, final_marking,
                               parameters=parameters)
    pn_visualizer.save(gviz, "alpha.svg")
    os.remove("createdPetriNet1.pnml")
    os.remove("alpha.svg")
def test_alphaMinerVisualizationFromXES(self):
    # to avoid static method warnings in tests,
    # that by construction of the unittest package have to be expressed in such way
    self.dummy_variable = "dummy_value"
    # Discover a net from the XES log, exercise export and visualization,
    # then token-replay the preprocessed log on it.
    log, net, marking, fmarking = self.obtainPetriNetThroughAlphaMiner(
        os.path.join(INPUT_DATA_DIR, "running-example.xes"))
    # preprocessing pipeline: sort by timestamp, sample, index the traces
    log = sorting.sort_timestamp(log)
    log = sampling.sample(log)
    log = index_attribute.insert_trace_index_as_event_attribute(log)
    # round-trip through the PNML exporter and clean up the artifact
    pnml_path = os.path.join(OUTPUT_DATA_DIR, "running-example.pnml")
    petri_exporter.apply(net, marking, pnml_path)
    os.remove(pnml_path)
    gviz = pn_viz.graphviz_visualization(net)
    self.assertEqual(gviz, gviz)
    # NOTE(review): final_marking is built here from the sink places but the
    # token replay below passes fmarking from the miner instead — confirm
    # whether this local marking is intentionally unused
    final_marking = petri.petrinet.Marking()
    for place in net.places:
        if not place.out_arcs:
            final_marking[place] = 1
    aligned_traces = token_replay.apply(log, net, marking, fmarking)
    self.assertEqual(aligned_traces, aligned_traces)
# Discover a Petri net from the reference log with the alpha miner,
# visualize it, and export it to PNML.
import os

from pm4py.objects.log.importer.xes import importer as xes_importer
from pm4py.algo.discovery.alpha import algorithm as alpha_miner
from pm4py.visualization.petrinet import visualizer as pn_visualizer
from pm4py.objects.petri.exporter import exporter as pnml_exporter

logfile = "D://process mining//HMM//pnml//reference//reference7.xes"
log = xes_importer.apply(os.path.join(logfile))

net, initial_marking, final_marking = alpha_miner.apply(log)

gviz = pn_visualizer.apply(net, initial_marking, final_marking)
pn_visualizer.view(gviz)

filename = "Alpha Miner.pnml"
pnml_exporter.apply(net, initial_marking, filename)
# Wire up the remainder of the hand-built Petri net. The places
# (start, end, p_2..p_7) and transitions (t_4..t_10) referenced here are
# created earlier in this script, before the visible excerpt.
utils.add_arc_from_to(t_4, p_3, net)
utils.add_arc_from_to(p_3, t_6, net)
utils.add_arc_from_to(t_6, p_4, net)
utils.add_arc_from_to(p_4, t_5, net)
utils.add_arc_from_to(p_4, t_7, net)
utils.add_arc_from_to(t_5, p_2, net)
# t_7 has two output places (p_5 and p_6) ...
utils.add_arc_from_to(t_7, p_5, net)
utils.add_arc_from_to(t_7, p_6, net)
utils.add_arc_from_to(p_5, t_9, net)
utils.add_arc_from_to(p_6, t_10, net)
# ... and both t_9 and t_10 feed p_7, from which t_8 arcs back to p_3
utils.add_arc_from_to(t_9, p_7, net)
utils.add_arc_from_to(t_10, p_7, net)
utils.add_arc_from_to(p_7, t_8, net)
utils.add_arc_from_to(t_8, p_3, net)
# one token in the start place initially; one expected in the end place
initial_marking = Marking()
initial_marking[start] = 1
final_marking = Marking()
final_marking[end] = 1
# export with the final marking, then visualize the net
from pm4py.objects.petri.exporter import exporter as pnml_exporter
pnml_exporter.apply(net, initial_marking, "createdPetriNet1.pnml",
                    final_marking=final_marking)
from pm4py.visualization.petrinet import visualizer as pn_visualizer
gviz = pn_visualizer.apply(net, initial_marking, final_marking)
pn_visualizer.view(gviz)
# second export, this time without passing a final marking
from pm4py.objects.petri.exporter import exporter as pnml_exporter
filename = "petri.pnml"
pnml_exporter.apply(net, initial_marking, filename)
# Batch-mine every CSV event log in a directory with the alpha miner and
# export each discovered Petri net next to its log as <name>.pnml.
import sys, os
import pandas as pd
from pm4py.objects.conversion.log import converter as log_converter
from pm4py.algo.discovery.inductive import algorithm as inductive_miner
from pm4py.objects.conversion.process_tree import converter as pt_converter
from pm4py.objects.petri.exporter import exporter as pnml_exporter
from pm4py.algo.discovery.alpha import algorithm as alpha_miner

dirname = sys.argv[1]
# dirname = "C:/Users/User/Desktop/DIPLOM/SampleHelloWorldProject/_behavioral_model_data/logs"

for filename in os.listdir(dirname):
    # guard clause: only CSV logs are processed
    if not filename.endswith(".csv"):
        continue
    # hoisted: the original recomputed filename.split('.')[0] inside the
    # per-transition and per-place loops although it never changes there
    base_name = filename.split('.')[0]
    log_csv = pd.read_csv(dirname + "/" + filename, sep=',')
    # map the CSV columns onto the pm4py standard attribute names
    log_csv.rename(columns={'activity name': 'concept:name'}, inplace=True)
    log_csv.rename(columns={'case ID': 'case:concept:name'}, inplace=True)
    event_log = log_converter.apply(
        log_csv, variant=log_converter.Variants.TO_EVENT_LOG)
    net, initial_marking, final_marking = alpha_miner.apply(event_log)
    # suffix every node name with the log's base name so that nets mined
    # from different logs keep distinct node names
    for transition in net.transitions:
        transition.name = transition.name.replace(" ", "_") + "_" + base_name
        transition.label = transition.label.replace(" ", "_")
    for place in net.places:
        place.name = place.name.replace(" ", "_") + "_" + base_name
    # tree = inductive_miner.apply_tree(event_log)
    # net, initial_marking, final_marking = pt_converter.apply(tree, variant=pt_converter.Variants.TO_PETRI_NET)
    pnml_exporter.apply(net, initial_marking,
                        dirname + "/" + base_name + ".pnml")
# Discover a Petri net from the reference log with the heuristics miner,
# visualize it, and export it to PNML.
import os

from pm4py.objects.log.importer.xes import importer as xes_importer
from pm4py.algo.discovery.heuristics import algorithm as heuristics_miner
from pm4py.visualization.petrinet import visualizer as pn_visualizer
from pm4py.objects.petri.exporter import exporter as pnml_exporter

logfile = "D://process mining//HMM//pnml//reference//reference5.xes"
log = xes_importer.apply(os.path.join(logfile))

# dependency threshold raised to 0.99 (miner parameter)
net, im, fm = heuristics_miner.apply(
    log,
    parameters={
        heuristics_miner.Variants.CLASSIC.value.Parameters.DEPENDENCY_THRESH:
        0.99
    })

gviz = pn_visualizer.apply(net, im, fm)
pn_visualizer.view(gviz)

filename = "Heuristic Miner.pnml"
pnml_exporter.apply(net, im, filename)
def discover_process_models(log_path, log_name):
    """
    Import an XES log and run a configurable battery of discovery algorithms.

    Each miner is wrapped in a constant-condition ``if`` used as a manual
    on/off switch: ``if (1 == 1)`` runs, ``if (1 == 2)`` is disabled.
    Only the Inductive Miner Infrequent 0.5 branch is currently enabled;
    it also renders the reachability graph and exports the net to PNML.

    Parameters
    ------------
    log_path
        Path of the XES log to import
    log_name
        NOTE(review): unused in the visible body — confirm whether it is
        needed (e.g. for output naming) or can be dropped
    """
    custom_print('Importando log')
    log_complete = xes_importer.apply(log_path)
    # keep only the variants that cover 90% of the traces
    log = variants_filter.filter_log_variants_percentage(log_complete, 0.9)
    # A_ACTIVATED, A_DECLINED, A_CANCELLED
    #log = attributes_filter.apply(log_complete, ["A_ACTIVATED"], parameters={attributes_filter.Parameters.ATTRIBUTE_KEY: "concept:name", attributes_filter.Parameters.POSITIVE: True})
    custom_print('Log importado')
    if (1 == 2):  # Inductive Miner (disabled toggle)
        custom_print('Iniciando Inductive Miner')
        parameters = {
            inductive_miner.Variants.IM.value.Parameters.CASE_ID_KEY:
            'case:concept:name',
            inductive_miner.Variants.IM.value.Parameters.TIMESTAMP_KEY:
            'time:timestamp'
        }
        variant = inductive_miner.Variants.IM
        petrinet = inductive_miner.apply(log,
                                         parameters=parameters,
                                         variant=variant)
        print_statistics(petrinet[0], 'IM')
        custom_print('Inductive Miner finalizado\n')
    if (1 == 2):  # Inductive Miner Infrequent 0.2 (disabled toggle)
        custom_print('Iniciando Inductive Miner Infrequent 0.2')
        parameters = {
            inductive_miner.Variants.IMf.value.Parameters.NOISE_THRESHOLD:
            0.2,
            inductive_miner.Variants.IMf.value.Parameters.CASE_ID_KEY:
            'case:concept:name',
            inductive_miner.Variants.IMf.value.Parameters.TIMESTAMP_KEY:
            'time:timestamp'
        }
        variant = inductive_miner.Variants.IMf
        petrinet = inductive_miner.apply(log,
                                         parameters=parameters,
                                         variant=variant)
        print_statistics(petrinet[0], 'IMf0.2')
        custom_print('Inductive Miner Infrequent 0.2 finalizado\n')
    if (1 == 1):  # Inductive Miner Infrequent 0.5 (the only enabled branch)
        custom_print('Iniciando Inductive Miner Infrequent 0.5')
        parameters = {
            inductive_miner.Variants.IMf.value.Parameters.NOISE_THRESHOLD:
            0.5,
            inductive_miner.Variants.IMf.value.Parameters.CASE_ID_KEY:
            'case:concept:name',
            inductive_miner.Variants.IMf.value.Parameters.TIMESTAMP_KEY:
            'time:timestamp'
        }
        variant = inductive_miner.Variants.IMf
        petrinet, initial_marking, final_marking = inductive_miner.apply(
            log, parameters=parameters, variant=variant)
        print_statistics(petrinet, 'IMf0.5')
        custom_print('Inductive Miner Infrequent 0.5 finalizado\n')
        # build and render the reachability graph of the discovered net
        ts = reachability_graph.construct_reachability_graph(
            petrinet, initial_marking)
        gviz = ts_visualizer.apply(
            ts,
            parameters={
                ts_visualizer.Variants.VIEW_BASED.value.Parameters.FORMAT:
                "png"
            })
        gviz.render('petrinets/simple-reach', cleanup=True)
        # persist the discovered Petri net
        pnml_exporter.apply(petrinet, initial_marking,
                            "petrinets/simple-petri.pnml")
    if (1 == 2):  # Inductive Miner Infrequent 0.8 (disabled toggle)
        custom_print('Iniciando Inductive Miner Infrequent 0.8')
        parameters = {
            inductive_miner.Variants.IMf.value.Parameters.NOISE_THRESHOLD:
            0.8,
            inductive_miner.Variants.IMf.value.Parameters.CASE_ID_KEY:
            'case:concept:name',
            inductive_miner.Variants.IMf.value.Parameters.TIMESTAMP_KEY:
            'time:timestamp'
        }
        variant = inductive_miner.Variants.IMf
        petrinet = inductive_miner.apply(log,
                                         parameters=parameters,
                                         variant=variant)
        print_statistics(petrinet[0], 'IMf0.8')
        custom_print('Inductive Miner Infrequent 0.8 finalizado\n')
    if (1 == 2):  # Inductive Miner Directly-Follows (disabled toggle)
        custom_print('Iniciando Inductive Miner Directly-Follows')
        parameters = {
            inductive_miner.Variants.IMd.value.Parameters.CASE_ID_KEY:
            'case:concept:name',
            inductive_miner.Variants.IMd.value.Parameters.TIMESTAMP_KEY:
            'time:timestamp'
        }
        variant = inductive_miner.Variants.IMd
        petrinet = inductive_miner.apply(log,
                                         parameters=parameters,
                                         variant=variant)
        print_statistics(petrinet[0], 'IMd')
        custom_print('Inductive Miner Infrequent Directly-Follows\n')
    if (1 == 2):  # Alpha Miner (disabled toggle)
        custom_print('Iniciando Alpha Miner')
        parameters = {}
        variant = alpha_miner.Variants.ALPHA_VERSION_CLASSIC
        petrinet = alpha_miner.apply(log,
                                     parameters=parameters,
                                     variant=variant)
        print_statistics(petrinet[0], 'Alpha')
        custom_print('Alpha Miner finalizado\n')
    if (1 == 2):  # Heuristic Miner 0.5 (disabled toggle)
        custom_print('Iniciando Heuristic Miner 0.5')
        parameters = {
            heuristics_miner.Variants.CLASSIC.value.Parameters.DEPENDENCY_THRESH:
            0.5
        }
        petrinet = heuristics_miner.apply(log, parameters=parameters)
        print_statistics(petrinet[0], 'HM0.5')
        custom_print('Heuristic Miner 0.5 finalizado\n')
    if (1 == 2):  # Heuristic Miner 0.99 (disabled toggle)
        custom_print('Iniciando Heuristic Miner 0.99')
        parameters = {
            heuristics_miner.Variants.CLASSIC.value.Parameters.DEPENDENCY_THRESH:
            0.99
        }
        petrinet = heuristics_miner.apply(log, parameters=parameters)
        print_statistics(petrinet[0], 'HM0.99')
        custom_print('Heuristic Miner 0.99 finalizado\n')
    if (1 == 2):  # Heuristic Miner 0.1 (disabled toggle)
        custom_print('Iniciando Heuristic Miner 0.1')
        parameters = {
            heuristics_miner.Variants.CLASSIC.value.Parameters.DEPENDENCY_THRESH:
            0.1
        }
        petrinet = heuristics_miner.apply(log, parameters=parameters)
        print_statistics(petrinet[0], 'HM0.1')
        custom_print('Heuristic Miner 0.1 finalizado\n')
    if (1 == 2):  # Heuristic Miner 1.0 (disabled toggle)
        custom_print('Iniciando Heuristic Miner 1.0')
        parameters = {
            heuristics_miner.Variants.CLASSIC.value.Parameters.DEPENDENCY_THRESH:
            1.0
        }
        petrinet = heuristics_miner.apply(log, parameters=parameters)
        print_statistics(petrinet[0], 'HM1.0')
        custom_print('Heuristic Miner 1.0 finalizado\n')
    if (1 == 2):  # DFG (disabled toggle)
        custom_print('Iniciando DFG')
        dfg = dfg_discovery.apply(log)
        parameters = {
            dfg_visualization.Variants.FREQUENCY.value.Parameters.FORMAT:
            'png'
        }
        gviz = dfg_visualization.apply(
            dfg,
            log=log,
            variant=dfg_visualization.Variants.FREQUENCY,
            parameters=parameters)
        dfg_visualization.save(gviz, 'petrinets/simple-DFG.png')
        custom_print('DFG finalizado\n')
# Alpha miner with a custom classifier as the activity key, followed by
# PNML round-trip management and process-tree generation.
from pm4py.algo.discovery.alpha import algorithm as alpha_miner

# fix: the Variants member is ALPHA_VERSION_CLASSIC (as used elsewhere in
# this codebase); ALPHA_CLASSIC is not a member of alpha_miner.Variants
parameters = {
    alpha_miner.Variants.ALPHA_VERSION_CLASSIC.value.Parameters.ACTIVITY_KEY:
    "customClassifier"
}
net, initial_marking, final_marking = alpha_miner.apply(log,
                                                        parameters=parameters)

# Petrinet Management
import os
from pm4py.objects.petri.importer import importer as pnml_importer
net, initial_marking, final_marking = pnml_importer.apply(
    os.path.join("tests", "input_data", "running-example.pnml"))
from pm4py.visualization.petrinet import visualizer as pn_visualizer
gviz = pn_visualizer.apply(net, initial_marking, final_marking)
pn_visualizer.view(gviz)
from pm4py.objects.petri.exporter import exporter as pnml_exporter
# export once without and once with an explicit final marking
pnml_exporter.apply(net, initial_marking, "petri.pnml")
pnml_exporter.apply(net, initial_marking, "petri_final.pnml",
                    final_marking=final_marking)

# tree generation
from pm4py.simulation.tree_generator import simulator as tree_gen
parameters = {}
tree = tree_gen.apply(parameters=parameters)
from pm4py.objects.process_tree import semantics
# play out 100 traces from the generated tree
log = semantics.generate_log(tree, no_traces=100)
from pm4py.objects.conversion.process_tree import converter as pt_converter
net, im, fm = pt_converter.apply(tree)
# Demo pipeline for pydream: discover a Petri net from the toy log,
# enhance it with decay functions, and decay-replay the log on it.
from pm4py.objects.log.importer.xes import importer as xes_importer
from pm4py.objects.petri.exporter import exporter as pnml_exporter
from pm4py.objects.petri.importer import importer as pnml_importer
from pm4py.algo.discovery.inductive import algorithm as inductive_miner
from pydream.LogWrapper import LogWrapper
from pydream.EnhancedPN import EnhancedPN
# NOTE(review): NAP appears unused in this excerpt — confirm it is needed
from pydream.predictive.nap.NAP import NAP

if __name__ == "__main__":
    log = xes_importer.apply('sample_data\\toy.xes')
    # discover a net with the inductive miner and round-trip it through PNML
    net, im, fm = inductive_miner.apply(log)
    pnml_exporter.apply(net, im, "sample_data\\discovered_pn.pnml", fm)
    net, initial_marking, final_marking = pnml_importer.apply(
        "sample_data\\discovered_pn.pnml")
    log_wrapper = LogWrapper(log)
    # enhance the imported net from the log and persist the result to JSON
    enhanced_pn = EnhancedPN(net, initial_marking)
    enhanced_pn.enhance(log_wrapper)
    enhanced_pn.saveToFile("sample_data\\enhanced_discovered_pn.json")
    # rebuild the enhanced net from the saved decay-function file and replay
    enhanced_pn = EnhancedPN(
        net,
        initial_marking,
        decay_function_file="sample_data\\enhanced_discovered_pn.json")
    tss_json, tss_objs = enhanced_pn.decay_replay(log_wrapper=log_wrapper)