def test_dfg(self):
    """Discover a DFG from the running example and check it is populated.

    Previously the discovered graph was discarded without any assertion,
    so the test could only fail by raising.
    """
    log = pm4py.read_xes("input_data/running-example.xes")
    dfg, sa, ea = pm4py.discover_dfg(log)
    # A non-empty log must yield a non-empty graph and start/end activity sets.
    self.assertTrue(len(dfg) > 0)
    self.assertTrue(len(sa) > 0)
    self.assertTrue(len(ea) > 0)
def test_precision_alignments(self):
    """Alignment-based precision of an inductive model on its own log.

    The value was previously computed but never checked.
    """
    log = pm4py.read_xes("input_data/running-example.xes")
    net, im, fm = pm4py.discover_petri_net_inductive(log)
    precision_ali = pm4py.precision_alignments(log, net, im, fm)
    # Precision is a fraction, hence always within [0, 1].
    self.assertGreaterEqual(precision_ali, 0.0)
    self.assertLessEqual(precision_ali, 1.0)
def test_convert_to_net_from_heu(self):
    """A discovered heuristics net must convert into a Petri net."""
    event_log = pm4py.read_xes("input_data/running-example.xes")
    heuristics_model = pm4py.discover_heuristics_net(event_log)
    converted_net, _, _ = pm4py.convert_to_petri_net(heuristics_model)
    self.assertTrue(isinstance(converted_net, PetriNet))
def test_heuristics_miner_heu_net(self):
    """Heuristics miner should return a heuristics net for the example log.

    The result was previously discarded without any assertion.
    """
    log = pm4py.read_xes("input_data/running-example.xes")
    heu_net = pm4py.discover_heuristics_net(log)
    self.assertIsNotNone(heu_net)
def test_alignments(self):
    """Alignment diagnostics on an inductive model of the same log.

    The aligned traces were previously discarded without any assertion.
    """
    log = pm4py.read_xes("input_data/running-example.xes")
    net, im, fm = pm4py.discover_petri_net_inductive(log)
    aligned_traces = pm4py.conformance_diagnostics_alignments(
        log, net, im, fm)
    # A non-empty log must produce at least one alignment result.
    self.assertIsNotNone(aligned_traces)
    self.assertTrue(len(aligned_traces) > 0)
def test_case_arrival(self):
    """The average case arrival rate must be computable on the example log."""
    import pm4py
    xes_path = os.path.join("input_data", "running-example.xes")
    event_log = pm4py.read_xes(xes_path)
    self.assertIsNotNone(pm4py.get_case_arrival_average(event_log))
def test_inductive_miner_noise(self):
    """Inductive miner with a noise threshold still yields a full model triple.

    The triple was previously discarded without any assertion.
    """
    log = pm4py.read_xes("input_data/running-example.xes")
    net, im, fm = pm4py.discover_petri_net_inductive(log, noise_threshold=0.5)
    self.assertIsNotNone(net)
    self.assertIsNotNone(im)
    self.assertIsNotNone(fm)
def execute_script():
    """Read the receipt log and print its rework statistics."""
    log_path = os.path.join("..", "tests", "input_data", "receipt.xes")
    event_log = pm4py.read_xes(log_path)
    print(rework_get.apply(event_log))
def test_tree_align_receipt(self):
    """Process-tree alignments on the receipt log.

    The alignment result was previously discarded without any assertion.
    """
    import pm4py
    log = pm4py.read_xes("input_data/receipt.xes")
    tree = pm4py.discover_process_tree_inductive(log, noise_threshold=0.2)
    al = pm4py.conformance_diagnostics_alignments(log, tree)
    self.assertIsNotNone(al)
Χρήση παραδοσιακών αλγορίθμων process mining. """ import pm4py from pm4py.algo.filtering.log.timestamp import timestamp_filter from pm4py.algo.discovery.alpha import algorithm as alpha_miner from pm4py.algo.discovery.heuristics import algorithm as heuristics_miner from pm4py.algo.discovery.inductive import algorithm as inductive_miner from pm4py.visualization.petrinet import visualizer, parameters from pm4py.algo.conformance.alignments import algorithm as alignment from pm4py.objects.petri.align_utils import pretty_print_alignments from contextlib import redirect_stdout from pm4py.evaluation.simplicity import evaluator as simplicity_factory from pm4py.evaluation.precision import evaluator as precision_evaluator from pm4py.evaluation.replay_fitness import evaluator as fitness_evaluator log = pm4py.read_xes( r'C:\Users\user\Desktop\ΔΙΠΛΩΜΑΤΙΚΗ\12696884\BPI Challenge 2017.xes\BPI Challenge 2017.xes' ) log_2 = log[:1000] #log = log[:1000] Για να βρώ το Precision παιρνω 1000 traces γιατί αλλιώς κάνει πάνω από 2 ώρες για να το βρεί. """ ALPHA-MINER """ alpha_petri, initial_marking, final_marking = alpha_miner.apply(log) gviz = visualizer.apply(alpha_petri, initial_marking, final_marking) visualizer.view(gviz) alpha_petri_2, initial_marking, final_marking = alpha_miner.apply( log_2) # Για το precision """ HEURISTIC-MINER
def test_align(self):
    """Alignments plus per-trace diagnostics dataframe.

    Neither result was previously asserted.
    """
    log = pm4py.read_xes("input_data/running-example.xes")
    net, im, fm = pm4py.discover_petri_net_inductive(log, noise_threshold=0.2)
    aligned_traces = alignments.apply(log, net, im, fm)
    diagn_df = alignments.get_diagnostics_dataframe(log, aligned_traces)
    self.assertIsNotNone(aligned_traces)
    self.assertIsNotNone(diagn_df)
def test_minimum_self_distance(self):
    """Minimum self-distance computation on the example log.

    The result was previously discarded without any assertion.
    """
    import pm4py
    from pm4py.algo.discovery.minimum_self_distance import algorithm as minimum_self_distance
    log = pm4py.read_xes(os.path.join("input_data", "running-example.xes"))
    msd = minimum_self_distance.apply(log)
    self.assertIsNotNone(msd)
def test_log_to_trie(self):
    """Conversion of a log into a trie.

    The trie was previously discarded without any assertion.
    """
    import pm4py
    from pm4py.algo.transformation.log_to_trie import algorithm as log_to_trie
    log = pm4py.read_xes(os.path.join("input_data", "running-example.xes"))
    trie = log_to_trie.apply(log)
    self.assertIsNotNone(trie)
def test_dfg_playout(self):
    """Playout simulation of a discovered DFG.

    The simulated log was previously dropped on the floor; capture it and
    assert the playout produced something.
    """
    import pm4py
    from pm4py.algo.simulation.playout.dfg import algorithm as dfg_playout
    log = pm4py.read_xes(os.path.join("input_data", "running-example.xes"))
    dfg, sa, ea = pm4py.discover_dfg(log)
    simulated_log = dfg_playout.apply(dfg, sa, ea)
    self.assertIsNotNone(simulated_log)
def test_marking_equation_net(self):
    """Marking-equation heuristic on an inductive model.

    The solver's result was previously discarded without any assertion.
    """
    import pm4py
    log = pm4py.read_xes(os.path.join("input_data", "running-example.xes"))
    net, im, fm = pm4py.discover_petri_net_inductive(log)
    heuristic_value = pm4py.solve_marking_equation(net, im, fm)
    self.assertIsNotNone(heuristic_value)
def test_tree_align_reviewing(self):
    """Process-tree alignments on the reviewing log.

    The alignment result was previously discarded without any assertion.
    """
    import pm4py
    log = pm4py.read_xes("compressed_input_data/04_reviewing.xes.gz")
    tree = pm4py.discover_process_tree_inductive(log, noise_threshold=0.2)
    al = pm4py.conformance_diagnostics_alignments(log, tree)
    self.assertIsNotNone(al)
def test_discovery_inductive_bpmn(self):
    """Inductive miner should yield a BPMN graph for the example log."""
    import pm4py
    xes_path = os.path.join("input_data", "running-example.xes")
    event_log = pm4py.read_xes(xes_path)
    self.assertIsNotNone(pm4py.discover_bpmn_inductive(event_log))
def execute_script():
    """End-to-end smoke test of the pm4py simplified interface.

    Reads the running-example log (XES and CSV), discovers models with
    several miners, writes them to disk and reads them back, renders
    static and (optionally) interactive visualizations, runs conformance
    checking and fitness/precision evaluation, queries statistics and
    filters on both the event-log and dataframe representations, and
    finally deletes every temporary file it created.
    """
    # when False, the interactive view_* calls at the end are skipped
    ENABLE_VISUALIZATION = True
    # reads a XES into an event log
    log1 = pm4py.read_xes("../tests/input_data/running-example.xes")
    # reads a CSV into a dataframe
    df = pd.read_csv("../tests/input_data/running-example.csv")
    # formats the dataframe with the mandatory columns for process mining purposes
    df = pm4py.format_dataframe(df, case_id="case:concept:name", activity_key="concept:name", timestamp_key="time:timestamp")
    # converts the dataframe to an event log
    log2 = pm4py.convert_to_event_log(df)
    # converts the log read from XES into a stream and dataframe respectively
    stream1 = pm4py.convert_to_event_stream(log1)
    df2 = pm4py.convert_to_dataframe(log1)
    # writes the log1 to a XES file
    pm4py.write_xes(log1, "ru1.xes")
    # model discovery on the same log: DFG, three Petri-net miners,
    # an inductive process tree and a heuristics net
    dfg, dfg_sa, dfg_ea = pm4py.discover_dfg(log1)
    petri_alpha, im_alpha, fm_alpha = pm4py.discover_petri_net_alpha(log1)
    petri_inductive, im_inductive, fm_inductive = pm4py.discover_petri_net_inductive(log1)
    petri_heuristics, im_heuristics, fm_heuristics = pm4py.discover_petri_net_heuristics(log1)
    tree_inductive = pm4py.discover_tree_inductive(log1)
    heu_net = pm4py.discover_heuristics_net(log1)
    # serialize the discovered models to disk ...
    pm4py.write_dfg(dfg, dfg_sa, dfg_ea, "ru_dfg.dfg")
    pm4py.write_petri_net(petri_alpha, im_alpha, fm_alpha, "ru_alpha.pnml")
    pm4py.write_petri_net(petri_inductive, im_inductive, fm_inductive, "ru_inductive.pnml")
    pm4py.write_petri_net(petri_heuristics, im_heuristics, fm_heuristics, "ru_heuristics.pnml")
    pm4py.write_process_tree(tree_inductive, "ru_inductive.ptml")
    # ... and read them back, rebinding the in-memory models to exercise
    # the round trip
    dfg, dfg_sa, dfg_ea = pm4py.read_dfg("ru_dfg.dfg")
    petri_alpha, im_alpha, fm_alpha = pm4py.read_petri_net("ru_alpha.pnml")
    petri_inductive, im_inductive, fm_inductive = pm4py.read_petri_net("ru_inductive.pnml")
    petri_heuristics, im_heuristics, fm_heuristics = pm4py.read_petri_net("ru_heuristics.pnml")
    tree_inductive = pm4py.read_process_tree("ru_inductive.ptml")
    # save static visualizations of every model to PNG files
    pm4py.save_vis_petri_net(petri_alpha, im_alpha, fm_alpha, "ru_alpha.png")
    pm4py.save_vis_petri_net(petri_inductive, im_inductive, fm_inductive, "ru_inductive.png")
    pm4py.save_vis_petri_net(petri_heuristics, im_heuristics, fm_heuristics, "ru_heuristics.png")
    pm4py.save_vis_process_tree(tree_inductive, "ru_inductive_tree.png")
    pm4py.save_vis_heuristics_net(heu_net, "ru_heunet.png")
    pm4py.save_vis_dfg(dfg, dfg_sa, dfg_ea, "ru_dfg.png")
    # interactive views (may open external windows) only when enabled
    if ENABLE_VISUALIZATION:
        pm4py.view_petri_net(petri_alpha, im_alpha, fm_alpha, format="svg")
        pm4py.view_petri_net(petri_inductive, im_inductive, fm_inductive, format="svg")
        pm4py.view_petri_net(petri_heuristics, im_heuristics, fm_heuristics, format="svg")
        pm4py.view_process_tree(tree_inductive, format="svg")
        pm4py.view_heuristics_net(heu_net, format="svg")
        pm4py.view_dfg(dfg, dfg_sa, dfg_ea, format="svg")
    # conformance checking of the log against the inductive model
    aligned_traces = pm4py.conformance_alignments(log1, petri_inductive, im_inductive, fm_inductive)
    replayed_traces = pm4py.conformance_tbr(log1, petri_inductive, im_inductive, fm_inductive)
    fitness_tbr = pm4py.evaluate_fitness_tbr(log1, petri_inductive, im_inductive, fm_inductive)
    print("fitness_tbr", fitness_tbr)
    fitness_align = pm4py.evaluate_fitness_alignments(log1, petri_inductive, im_inductive, fm_inductive)
    print("fitness_align", fitness_align)
    precision_tbr = pm4py.evaluate_precision_tbr(log1, petri_inductive, im_inductive, fm_inductive)
    print("precision_tbr", precision_tbr)
    precision_align = pm4py.evaluate_precision_alignments(log1, petri_inductive, im_inductive, fm_inductive)
    print("precision_align", precision_align)
    # statistics queried on both representations: event log (log2) and
    # dataframe (df2) — output should agree between the two
    print("log start activities = ", pm4py.get_start_activities(log2))
    print("df start activities = ", pm4py.get_start_activities(df2))
    print("log end activities = ", pm4py.get_end_activities(log2))
    print("df end activities = ", pm4py.get_end_activities(df2))
    print("log attributes = ", pm4py.get_attributes(log2))
    print("df attributes = ", pm4py.get_attributes(df2))
    print("log org:resource values = ", pm4py.get_attribute_values(log2, "org:resource"))
    print("df org:resource values = ", pm4py.get_attribute_values(df2, "org:resource"))
    # filtering: start/end activities, attribute values at case and event
    # level (including retain=False), variants and directly-follows paths
    print("start_activities len(filt_log) = ", len(pm4py.filter_start_activities(log2, ["register request"])))
    print("start_activities len(filt_df) = ", len(pm4py.filter_start_activities(df2, ["register request"])))
    print("end_activities len(filt_log) = ", len(pm4py.filter_end_activities(log2, ["pay compensation"])))
    print("end_activities len(filt_df) = ", len(pm4py.filter_end_activities(df2, ["pay compensation"])))
    print("attributes org:resource len(filt_log) (cases) cases = ", len(pm4py.filter_attribute_values(log2, "org:resource", ["Ellen"], level="case")))
    print("attributes org:resource len(filt_log) (cases) events = ", len(pm4py.filter_attribute_values(log2, "org:resource", ["Ellen"], level="event")))
    print("attributes org:resource len(filt_df) (events) cases = ", len(pm4py.filter_attribute_values(df2, "org:resource", ["Ellen"], level="case")))
    print("attributes org:resource len(filt_df) (events) events = ", len(pm4py.filter_attribute_values(df2, "org:resource", ["Ellen"], level="event")))
    print("attributes org:resource len(filt_df) (events) events notpositive = ", len(pm4py.filter_attribute_values(df2, "org:resource", ["Ellen"], level="event", retain=False)))
    print("variants log = ", pm4py.get_variants(log2))
    print("variants df = ", pm4py.get_variants(df2))
    print("variants filter log = ", len(pm4py.filter_variants(log2, [["register request", "examine thoroughly", "check ticket", "decide", "reject request"]])))
    print("variants filter df = ", len(pm4py.filter_variants(df2, [["register request", "examine thoroughly", "check ticket", "decide", "reject request"]])))
    print("variants filter percentage = ", len(pm4py.filter_variants_percentage(log2, threshold=0.8)))
    print("paths filter log len = ", len(pm4py.filter_directly_follows_relation(log2, [("register request", "examine casually")])))
    print("paths filter dataframe len = ", len(pm4py.filter_directly_follows_relation(df2, [("register request", "examine casually")])))
    # time-range filtering in the three supported modes, on log and dataframe
    print("timeframe filter log events len = ", len(pm4py.filter_time_range(log2, "2011-01-01 00:00:00", "2011-02-01 00:00:00", mode="events")))
    print("timeframe filter log traces_contained len = ", len(pm4py.filter_time_range(log2, "2011-01-01 00:00:00", "2011-02-01 00:00:00", mode="traces_contained")))
    print("timeframe filter log traces_intersecting len = ", len(pm4py.filter_time_range(log2, "2011-01-01 00:00:00", "2011-02-01 00:00:00", mode="traces_intersecting")))
    print("timeframe filter df events len = ", len(pm4py.filter_time_range(df2, "2011-01-01 00:00:00", "2011-02-01 00:00:00", mode="events")))
    print("timeframe filter df traces_contained len = ", len(pm4py.filter_time_range(df2, "2011-01-01 00:00:00", "2011-02-01 00:00:00", mode="traces_contained")))
    print("timeframe filter df traces_intersecting len = ", len(pm4py.filter_time_range(df2, "2011-01-01 00:00:00", "2011-02-01 00:00:00", mode="traces_intersecting")))
    # remove the temporary files
    os.remove("ru1.xes")
    os.remove("ru_dfg.dfg")
    os.remove("ru_alpha.pnml")
    os.remove("ru_inductive.pnml")
    os.remove("ru_heuristics.pnml")
    os.remove("ru_inductive.ptml")
    os.remove("ru_alpha.png")
    os.remove("ru_inductive.png")
    os.remove("ru_heuristics.png")
    os.remove("ru_inductive_tree.png")
    os.remove("ru_heunet.png")
    os.remove("ru_dfg.png")
def test_efg(self):
    """Eventually-follows graph discovery on the example log.

    The graph was previously discarded without any assertion.
    """
    log = pm4py.read_xes("input_data/running-example.xes")
    efg = pm4py.discover_eventually_follows_graph(log)
    # A non-empty log yields a non-empty eventually-follows graph.
    self.assertTrue(len(efg) > 0)
def execute_script():
    """Compute and print the cycle time of the interval event log."""
    log_path = os.path.join("..", "tests", "input_data", "interval_event_log.xes")
    event_log = pm4py.read_xes(log_path)
    # the interval log carries both a start and a completion timestamp
    params = {
        cycle_time_get.Parameters.START_TIMESTAMP_KEY: "start_timestamp",
        cycle_time_get.Parameters.TIMESTAMP_KEY: "time:timestamp",
    }
    print(cycle_time_get.apply(event_log, parameters=params))
def test_inductive_miner_tree(self):
    """Process-tree discovery with and without noise filtering.

    Both discovered trees were previously discarded without any assertion.
    """
    log = pm4py.read_xes("input_data/running-example.xes")
    tree = pm4py.discover_process_tree_inductive(log)
    self.assertIsNotNone(tree)
    tree = pm4py.discover_process_tree_inductive(log, noise_threshold=0.2)
    self.assertIsNotNone(tree)
def read_xes(path):
    """Read a XES file and return its content as a pandas DataFrame."""
    from pm4py.objects.conversion.log import converter as log_converter
    event_log = pm4py.read_xes(path)
    return log_converter.apply(event_log, variant=log_converter.Variants.TO_DATA_FRAME)
def test_dfg(self):
    """Directly-follows graph discovery must yield non-empty results.

    The discovered triple was previously discarded without any assertion.
    """
    log = pm4py.read_xes("input_data/running-example.xes")
    dfg, sa, ea = pm4py.discover_directly_follows_graph(log)
    self.assertTrue(len(dfg) > 0)
    self.assertTrue(len(sa) > 0)
    self.assertTrue(len(ea) > 0)
def test_new_statistics_log(self):
    """Smoke-check the newer log-statistics helpers.

    All three results were previously discarded; assert each returns a value.
    """
    log = pm4py.read_xes("input_data/running-example.xes")
    self.assertIsNotNone(pm4py.get_trace_attribute_values(log, "creator"))
    self.assertIsNotNone(pm4py.discover_eventually_follows_graph(log))
    self.assertIsNotNone(pm4py.get_case_arrival_average(log))
def test_tbr(self):
    """Token-based replay diagnostics on an inductive model.

    The replay result was previously discarded without any assertion.
    """
    log = pm4py.read_xes("input_data/running-example.xes")
    net, im, fm = pm4py.discover_petri_net_inductive(log)
    replayed_traces = pm4py.conformance_diagnostics_token_based_replay(
        log, net, im, fm)
    self.assertIsNotNone(replayed_traces)
    self.assertTrue(len(replayed_traces) > 0)
def test_serialization_log(self):
    """A log must survive a serialize/deserialize round trip.

    Previously the deserialized log was never compared to the original.
    """
    log = pm4py.read_xes("input_data/running-example.xes")
    ser = pm4py.serialize(log)
    log2 = pm4py.deserialize(ser)
    # Round trip must preserve the log's length.
    self.assertIsNotNone(log2)
    self.assertEqual(len(log), len(log2))
def test_precision_tbr(self):
    """Token-based-replay precision of an inductive model on its own log.

    The value was previously computed but never checked.
    """
    log = pm4py.read_xes("input_data/running-example.xes")
    net, im, fm = pm4py.discover_petri_net_inductive(log)
    precision_tbr = pm4py.precision_token_based_replay(log, net, im, fm)
    # Precision is a fraction, hence always within [0, 1].
    self.assertGreaterEqual(precision_tbr, 0.0)
    self.assertLessEqual(precision_tbr, 1.0)
def test_minimum_self_distance_2(self):
    """Minimum self-distance witnesses via the simplified interface.

    The result was previously discarded without any assertion.
    """
    import pm4py
    log = pm4py.read_xes(os.path.join("input_data", "running-example.xes"))
    msd = pm4py.get_minimum_self_distance_witnesses(log)
    self.assertIsNotNone(msd)
def test_alpha_miner(self):
    """Alpha miner should return a complete Petri-net triple.

    The triple was previously discarded without any assertion.
    """
    log = pm4py.read_xes("input_data/running-example.xes")
    net, im, fm = pm4py.discover_petri_net_alpha(log)
    self.assertIsNotNone(net)
    self.assertIsNotNone(im)
    self.assertIsNotNone(fm)
def importLog(path, name):
    """Load the XES event log named ``<name>.xes`` from directory ``path``."""
    return pm.read_xes(os.path.join(path, name + ".xes"))