def execute_script():
    """Demonstrate inhibitor and reset arcs on a small sequential Petri net.

    Builds the flow source -> A -> p1 -> B -> p2 -> C -> sink, attaches an
    inhibitor arc that can block B and a reset arc that empties a place when
    C fires, then steps through the semantics and prints the markings.
    """
    net = PetriNet("")

    # Places of the net.
    source = PetriNet.Place("source")
    sink = PetriNet.Place("sink")
    p1 = PetriNet.Place("p1")
    p2 = PetriNet.Place("p2")
    p_inhibitor = PetriNet.Place("p_inhibitor")
    p_reset = PetriNet.Place("p_reset")
    for place in (source, sink, p1, p2, p_inhibitor, p_reset):
        net.places.add(place)

    # Visible transitions A/B/C plus two silent (None-labelled) transitions.
    trans_A = PetriNet.Transition("A", "A")
    trans_B = PetriNet.Transition("B", "B")
    trans_C = PetriNet.Transition("C", "C")
    trans_inhibitor = PetriNet.Transition("inhibitor", None)
    trans_free = PetriNet.Transition("free", None)
    for transition in (trans_A, trans_B, trans_C, trans_free, trans_inhibitor):
        net.transitions.add(transition)

    # Plain sequential flow.
    add_arc_from_to(source, trans_A, net)
    add_arc_from_to(trans_A, p1, net)
    add_arc_from_to(p1, trans_B, net)
    add_arc_from_to(trans_B, p2, net)
    add_arc_from_to(p2, trans_C, net)
    add_arc_from_to(trans_C, sink, net)

    # Inhibitor arc: any token in p_inhibitor disables B.
    add_arc_from_to(trans_inhibitor, p_inhibitor, net)
    inhibitor_arc = add_arc_from_to(p_inhibitor, trans_B, net, type="inhibitor")

    # Reset arc: firing C removes every token from p_reset.
    add_arc_from_to(trans_free, p_reset, net)
    reset_arc = add_arc_from_to(p_reset, trans_C, net, type="reset")

    im = Marking({source: 1})
    fm = Marking({sink: 1})
    pm4py.view_petri_net(net, im, fm, format="svg")

    m = semantics.execute(trans_A, net, im)
    print(m)
    # B is enabled in m because no tokens in the "inhibitor" place
    print(semantics.enabled_transitions(net, m))
    # if we put a token in the inhibitor place, B is not enabled anymore
    m2 = deepcopy(m)
    m2[p_inhibitor] = 1
    print(semantics.enabled_transitions(net, m2))
    # let's continue with m and fire B, and three times the "free" transition
    m = semantics.execute(trans_B, net, m)
    for _ in range(3):
        m = semantics.execute(trans_free, net, m)
    # we have three tokens in the 'reset' place. Firing C, all of them are
    # removed because of the reset arc
    print(m)
    m = semantics.execute(trans_C, net, m)
    print(m)
    print(m == fm)
def execute_script():
    """Assemble a strictly sequential receipt-confirmation Petri net and
    render it as an SVG."""
    net = PetriNet("test")

    source = PetriNet.Place("source")
    sink = PetriNet.Place("sink")
    p1 = PetriNet.Place("p1")
    p2 = PetriNet.Place("p2")
    p3 = PetriNet.Place("p3")
    p4 = PetriNet.Place("p4")

    t1 = PetriNet.Transition("Confirmation of receipt", "Confirmation of receipt")
    t2 = PetriNet.Transition("T02 Check confirmation of receipt", "T02 Check confirmation of receipt")
    t3 = PetriNet.Transition("T04 Determine confirmation of receipt", "T04 Determine confirmation of receipt")
    t4 = PetriNet.Transition("T05 Print and send confirmation of receipt", "T05 Print and send confirmation of receipt")
    t5 = PetriNet.Transition("T06 Determine necessity of stop advice", "T06 Determine necessity of stop advice")

    for place in (source, sink, p1, p2, p3, p4):
        net.places.add(place)
    for transition in (t1, t2, t3, t4, t5):
        net.transitions.add(transition)

    # Chain the nodes alternately place/transition:
    # source -> t1 -> p1 -> t2 -> p2 -> t3 -> p3 -> t4 -> p4 -> t5 -> sink.
    path = (source, t1, p1, t2, p2, t3, p3, t4, p4, t5, sink)
    for src, dst in zip(path, path[1:]):
        petri_utils.add_arc_from_to(src, dst, net)

    im = Marking()
    im[source] = 1
    fm = Marking()
    fm[sink] = 1
    pm4py.view_petri_net(net, im, fm, format="svg")
def execute_script():
    """Replay alignment moves on a data Petri net, printing guard activity.

    Reads a log and a data-aware Petri net, aligns every trace, then walks
    each alignment move-by-move through the data-aware semantics, reporting
    satisfied guards and moves blocked by guards.
    """
    log = pm4py.read_xes(os.path.join("..", "tests", "input_data", "roadtraffic100traces.xes"))
    net, im, fm = pm4py.read_pnml(os.path.join("..", "tests", "input_data", "data_petri_net.pnml"))
    pm4py.view_petri_net(net, im, fm, format="svg")

    # Align with the memory-lean Dijkstra variant; ask for (label, name)
    # tuples so model moves can be mapped back to net transitions.
    aligned_traces = alignments.apply(
        log, net, im, fm,
        variant=alignments.Variants.VERSION_DIJKSTRA_LESS_MEMORY,
        parameters={"ret_tuple_as_trans_desc": True})

    for trace_no, trace in enumerate(log):
        aligned_trace = aligned_traces[trace_no]
        # Each move: (log-side label, resolved transition or None).
        moves = [(step[0][0], get_trans_by_name(net, step[0][1]))
                 for step in aligned_trace["alignment"]]
        marking = DataMarking(im)
        event_idx = 0
        for log_label, model_trans in moves:
            if model_trans is not None:
                # Evaluate enabledness (including data guards) against the
                # current event, clamping the index to the trace length.
                event = trace[min(event_idx, len(trace) - 1)]
                enabled = semantics.enabled_transitions(net, marking, event)
                if model_trans in enabled:
                    if "guard" in model_trans.properties:
                        print(model_trans, "GUARD SATISFIED", model_trans.properties["guard"], marking)
                    marking = semantics.execute(model_trans, net, marking, event)
                else:
                    print("TRANSITION UNAVAILABLE! Guards are blocking")
            # The event pointer advances only on non-">>" log moves.
            if log_label != ">>":
                event_idx += 1
def visualizeProcess(view, petri_net, initial_marking, final_marking, log=None):
    """Render the process model selected by *view* and return the result.

    Fixes three defects of the previous version: (1) `process` was only
    assigned in the "BPMN" branch, so every other view crashed with
    UnboundLocalError on `return process`; (2) it read an undefined free
    variable `log` — the log is now an explicit, optional parameter
    (backward-compatible default); (3) the view names were mismatched to the
    discovery algorithms ("Heuristics net" ran the alpha miner, "Process
    tree" ran the heuristics miner).

    Parameters
    ----------
    view : str
        One of "Heuristics net", "Petri net", "Process tree", "BPMN".
    petri_net, initial_marking, final_marking
        A pre-discovered accepting Petri net, used as-is unless *log* is
        supplied and *view* requests re-discovery.
    log : optional
        Event log to re-discover the model from; when None the supplied
        net/markings are rendered directly.

    Returns
    -------
    Whatever pm4py.view_petri_net returns for the rendered model.
    """
    # Re-discover with the algorithm that actually matches the requested view.
    if log is not None:
        if view == "Heuristics net":
            petri_net, initial_marking, final_marking = pm4py.discover_petri_net_heuristics(log)
        elif view == "Petri net":
            petri_net, initial_marking, final_marking = pm4py.discover_petri_net_alpha(log)
        elif view == "Process tree":
            petri_net, initial_marking, final_marking = pm4py.discover_petri_net_inductive(log)
    # Always render and return so no view path leaves the result undefined.
    process = pm4py.view_petri_net(petri_net, initial_marking,
                                   final_marking, format='png')
    return process
def execute_script():
    """End-to-end tour of the pm4py simplified interface.

    Reads the running example as XES and CSV, discovers DFG / Petri nets /
    process tree / heuristics net, round-trips them through files, renders
    visualizations, runs conformance checking and evaluation, exercises the
    statistics and filtering API on both log and dataframe objects, and
    finally deletes every temporary file it created.
    """
    # Toggle for the interactive view_* calls below.
    ENABLE_VISUALIZATION = True
    # reads a XES into an event log
    log1 = pm4py.read_xes("../tests/input_data/running-example.xes")
    # reads a CSV into a dataframe
    df = pd.read_csv("../tests/input_data/running-example.csv")
    # formats the dataframe with the mandatory columns for process mining purposes
    df = pm4py.format_dataframe(df, case_id="case:concept:name", activity_key="concept:name",
                                timestamp_key="time:timestamp")
    # converts the dataframe to an event log
    log2 = pm4py.convert_to_event_log(df)
    # converts the log read from XES into a stream and dataframe respectively
    stream1 = pm4py.convert_to_event_stream(log1)
    df2 = pm4py.convert_to_dataframe(log1)
    # writes the log1 to a XES file
    pm4py.write_xes(log1, "ru1.xes")

    # Discovery: DFG plus three Petri-net miners, a process tree and a
    # heuristics net, all from the XES log.
    dfg, dfg_sa, dfg_ea = pm4py.discover_dfg(log1)
    petri_alpha, im_alpha, fm_alpha = pm4py.discover_petri_net_alpha(log1)
    petri_inductive, im_inductive, fm_inductive = pm4py.discover_petri_net_inductive(
        log1)
    petri_heuristics, im_heuristics, fm_heuristics = pm4py.discover_petri_net_heuristics(
        log1)
    tree_inductive = pm4py.discover_tree_inductive(log1)
    heu_net = pm4py.discover_heuristics_net(log1)

    # Serialize every discovered artifact, then read each one back
    # (round-trip check of the import/export functions).
    pm4py.write_dfg(dfg, dfg_sa, dfg_ea, "ru_dfg.dfg")
    pm4py.write_petri_net(petri_alpha, im_alpha, fm_alpha, "ru_alpha.pnml")
    pm4py.write_petri_net(petri_inductive, im_inductive, fm_inductive, "ru_inductive.pnml")
    pm4py.write_petri_net(petri_heuristics, im_heuristics, fm_heuristics, "ru_heuristics.pnml")
    pm4py.write_process_tree(tree_inductive, "ru_inductive.ptml")
    dfg, dfg_sa, dfg_ea = pm4py.read_dfg("ru_dfg.dfg")
    petri_alpha, im_alpha, fm_alpha = pm4py.read_petri_net("ru_alpha.pnml")
    petri_inductive, im_inductive, fm_inductive = pm4py.read_petri_net(
        "ru_inductive.pnml")
    petri_heuristics, im_heuristics, fm_heuristics = pm4py.read_petri_net(
        "ru_heuristics.pnml")
    tree_inductive = pm4py.read_process_tree("ru_inductive.ptml")

    # Save static PNG visualizations of every model.
    pm4py.save_vis_petri_net(petri_alpha, im_alpha, fm_alpha, "ru_alpha.png")
    pm4py.save_vis_petri_net(petri_inductive,
                             im_inductive, fm_inductive, "ru_inductive.png")
    pm4py.save_vis_petri_net(petri_heuristics, im_heuristics, fm_heuristics,
                             "ru_heuristics.png")
    pm4py.save_vis_process_tree(tree_inductive, "ru_inductive_tree.png")
    pm4py.save_vis_heuristics_net(heu_net, "ru_heunet.png")
    pm4py.save_vis_dfg(dfg, dfg_sa, dfg_ea, "ru_dfg.png")

    # Interactive (SVG) views, guarded by the flag at the top.
    if ENABLE_VISUALIZATION:
        pm4py.view_petri_net(petri_alpha, im_alpha, fm_alpha, format="svg")
        pm4py.view_petri_net(petri_inductive, im_inductive, fm_inductive,
                             format="svg")
        pm4py.view_petri_net(petri_heuristics, im_heuristics, fm_heuristics,
                             format="svg")
        pm4py.view_process_tree(tree_inductive, format="svg")
        pm4py.view_heuristics_net(heu_net, format="svg")
        pm4py.view_dfg(dfg, dfg_sa, dfg_ea, format="svg")

    # Conformance checking (alignments and token-based replay) and the four
    # fitness/precision evaluations, all against the inductive model.
    aligned_traces = pm4py.conformance_alignments(log1, petri_inductive, im_inductive, fm_inductive)
    replayed_traces = pm4py.conformance_tbr(log1, petri_inductive, im_inductive, fm_inductive)
    fitness_tbr = pm4py.evaluate_fitness_tbr(log1, petri_inductive, im_inductive, fm_inductive)
    print("fitness_tbr", fitness_tbr)
    fitness_align = pm4py.evaluate_fitness_alignments(log1, petri_inductive, im_inductive, fm_inductive)
    print("fitness_align", fitness_align)
    precision_tbr = pm4py.evaluate_precision_tbr(log1, petri_inductive, im_inductive, fm_inductive)
    print("precision_tbr", precision_tbr)
    precision_align = pm4py.evaluate_precision_alignments(
        log1, petri_inductive, im_inductive, fm_inductive)
    print("precision_align", precision_align)

    # Statistics API exercised on both the log (log2) and dataframe (df2).
    print("log start activities = ", pm4py.get_start_activities(log2))
    print("df start activities = ", pm4py.get_start_activities(df2))
    print("log end activities = ", pm4py.get_end_activities(log2))
    print("df end activities = ", pm4py.get_end_activities(df2))
    print("log attributes = ", pm4py.get_attributes(log2))
    print("df attributes = ", pm4py.get_attributes(df2))
    print("log org:resource values = ",
          pm4py.get_attribute_values(log2, "org:resource"))
    print("df org:resource values = ",
          pm4py.get_attribute_values(df2, "org:resource"))

    # Filtering API: start/end activities on both representations.
    print("start_activities len(filt_log) = ",
          len(pm4py.filter_start_activities(log2, ["register request"])))
    print("start_activities len(filt_df) = ",
          len(pm4py.filter_start_activities(df2, ["register request"])))
    print("end_activities len(filt_log) = ",
          len(pm4py.filter_end_activities(log2, ["pay compensation"])))
    print("end_activities len(filt_df) = ",
          len(pm4py.filter_end_activities(df2, ["pay compensation"])))

    # Attribute filtering at case and event level, positive and negative.
    print(
        "attributes org:resource len(filt_log) (cases) cases = ",
        len(
            pm4py.filter_attribute_values(log2,
                                          "org:resource", ["Ellen"],
                                          level="case")))
    print(
        "attributes org:resource len(filt_log) (cases) events = ",
        len(
            pm4py.filter_attribute_values(log2,
                                          "org:resource", ["Ellen"],
                                          level="event")))
    print(
        "attributes org:resource len(filt_df) (events) cases = ",
        len(
            pm4py.filter_attribute_values(df2,
                                          "org:resource", ["Ellen"],
                                          level="case")))
    print(
        "attributes org:resource len(filt_df) (events) events = ",
        len(
            pm4py.filter_attribute_values(df2,
                                          "org:resource", ["Ellen"],
                                          level="event")))
    print(
        "attributes org:resource len(filt_df) (events) events notpositive = ",
        len(
            pm4py.filter_attribute_values(df2,
                                          "org:resource", ["Ellen"],
                                          level="event",
                                          retain=False)))

    # Variants retrieval and variant-based filtering.
    print("variants log = ", pm4py.get_variants(log2))
    print("variants df = ", pm4py.get_variants(df2))
    print(
        "variants filter log = ",
        len(
            pm4py.filter_variants(log2, [[
                "register request", "examine thoroughly", "check ticket",
                "decide", "reject request"
            ]])))
    print(
        "variants filter df = ",
        len(
            pm4py.filter_variants(df2, [[
                "register request", "examine thoroughly", "check ticket",
                "decide", "reject request"
            ]])))
    print("variants filter percentage = ",
          len(pm4py.filter_variants_percentage(log2, threshold=0.8)))

    # Directly-follows (paths) filtering.
    print(
        "paths filter log len = ",
        len(
            pm4py.filter_directly_follows_relation(
                log2, [("register request", "examine casually")])))
    print(
        "paths filter dataframe len = ",
        len(
            pm4py.filter_directly_follows_relation(
                df2, [("register request", "examine casually")])))

    # Time-range filtering in its three modes, on both log and dataframe.
    print(
        "timeframe filter log events len = ",
        len(
            pm4py.filter_time_range(log2,
                                    "2011-01-01 00:00:00",
                                    "2011-02-01 00:00:00",
                                    mode="events")))
    print(
        "timeframe filter log traces_contained len = ",
        len(
            pm4py.filter_time_range(log2,
                                    "2011-01-01 00:00:00",
                                    "2011-02-01 00:00:00",
                                    mode="traces_contained")))
    print(
        "timeframe filter log traces_intersecting len = ",
        len(
            pm4py.filter_time_range(log2,
                                    "2011-01-01 00:00:00",
                                    "2011-02-01 00:00:00",
                                    mode="traces_intersecting")))
    print(
        "timeframe filter df events len = ",
        len(
            pm4py.filter_time_range(df2,
                                    "2011-01-01 00:00:00",
                                    "2011-02-01 00:00:00",
                                    mode="events")))
    print(
        "timeframe filter df traces_contained len = ",
        len(
            pm4py.filter_time_range(df2,
                                    "2011-01-01 00:00:00",
                                    "2011-02-01 00:00:00",
                                    mode="traces_contained")))
    print(
        "timeframe filter df traces_intersecting len = ",
        len(
            pm4py.filter_time_range(df2,
                                    "2011-01-01 00:00:00",
                                    "2011-02-01 00:00:00",
                                    mode="traces_intersecting")))

    # remove the temporary files
    os.remove("ru1.xes")
    os.remove("ru_dfg.dfg")
    os.remove("ru_alpha.pnml")
    os.remove("ru_inductive.pnml")
    os.remove("ru_heuristics.pnml")
    os.remove("ru_inductive.ptml")
    os.remove("ru_alpha.png")
    os.remove("ru_inductive.png")
    os.remove("ru_heuristics.png")
    os.remove("ru_inductive_tree.png")
    os.remove("ru_heunet.png")
    os.remove("ru_dfg.png")
case_id='case_id', activity_key='activity', timestamp_key='timestamp') start_activities = pm4py.get_start_activities(event_log) end_activities = pm4py.get_end_activities(event_log) print("Start activities: {}\nEnd activities: {}".format( start_activities, end_activities)) # Convert from CSV to XES pm4py.write_xes(event_log, 'running-example-exported.xes') # Algorithm alpha log = pm4py.read_xes('running-example-exported.xes') net, initial_marking, final_marking = pm4py.algo.discovery.alpha.algorithm.apply( log) pm4py.view_petri_net(net, initial_marking, final_marking) ##2 # Preparing the log --- Resource event_log = pd.read_csv('running-exampleNew.csv', sep=';') event_log = pm4py.format_dataframe(event_log, case_id='case_id', activity_key='resource', timestamp_key='timestamp') start_activities = pm4py.get_start_activities(event_log) end_activities = pm4py.get_end_activities(event_log) print("Start activities: {}\nEnd activities: {}".format( start_activities, end_activities)) # Convert from CSV to XES pm4py.write_xes(event_log, 'running-example-exported.xes')