def test_importingExportingPetri(self):
    """Round-trip a PNML file (import -> export -> re-import) and verify
    that both nets describe the same places, transitions, arcs and
    initial marking."""
    # to avoid static method warnings in tests, that by construction of
    # the unittest package have to be expressed in such way
    self.dummy_variable = "dummy_value"
    imported_petri1, marking1, fmarking1 = petri_importer.import_net(
        os.path.join(INPUT_DATA_DIR, "running-example.pnml"))
    # soundness check is run for its sanity side effect only; the result
    # itself is discarded
    soundness = check_soundness.check_petri_wfnet_and_soundness(
        imported_petri1)
    del soundness
    petri_exporter.export_net(
        imported_petri1, marking1,
        os.path.join(OUTPUT_DATA_DIR, "running-example.pnml"))
    imported_petri2, marking2, fmarking2 = petri_importer.import_net(
        os.path.join(OUTPUT_DATA_DIR, "running-example.pnml"))
    soundness = check_soundness.check_petri_wfnet_and_soundness(
        imported_petri2)
    del soundness
    self.assertEqual(sorted([x.name for x in imported_petri1.places]),
                     sorted([x.name for x in imported_petri2.places]))
    self.assertEqual(sorted([x.name for x in imported_petri1.transitions]),
                     sorted([x.name for x in imported_petri2.transitions]))
    self.assertEqual(
        sorted(
            [x.source.name + x.target.name for x in imported_petri1.arcs]),
        sorted(
            [x.source.name + x.target.name for x in imported_petri2.arcs]))
    # BUG FIX: markings are dict-like, so comparing the raw name lists
    # depends on insertion order and may fail spuriously; compare them
    # order-independently like the places/transitions checks above.
    self.assertEqual(sorted([x.name for x in marking1]),
                     sorted([x.name for x in marking2]))
    os.remove(os.path.join(OUTPUT_DATA_DIR, "running-example.pnml"))
def test_applyAlphaMinerToCSV(self):
    """Run the Alpha Miner twice on the same CSV log and check that both
    runs produce structurally equal-sized Petri nets; then replay the log
    on the first net as a smoke test."""
    # to avoid static method warnings in tests, that by construction of
    # the unittest package have to be expressed in such way
    self.dummy_variable = "dummy_value"
    # calculate and compare Petri nets obtained on the same log to verify
    # that instances are working correctly
    csv_path = os.path.join(INPUT_DATA_DIR, "running-example.csv")
    log1, net1, marking1, fmarking1 = self.obtainPetriNetThroughAlphaMiner(
        csv_path)
    log2, net2, marking2, fmarking2 = self.obtainPetriNetThroughAlphaMiner(
        csv_path)
    # sort, sample and index both logs the same way
    log1 = index_attribute.insert_trace_index_as_event_attribute(
        sampling.sample(sorting.sort_timestamp(log1)))
    log2 = index_attribute.insert_trace_index_as_event_attribute(
        sampling.sample(sorting.sort_timestamp(log2)))
    export_path = os.path.join(OUTPUT_DATA_DIR, "running-example.pnml")
    petri_exporter.export_net(net1, marking1, export_path)
    os.remove(export_path)
    self.assertEqual(len(net1.places), len(net2.places))
    self.assertEqual(len(net1.transitions), len(net2.transitions))
    self.assertEqual(len(net1.arcs), len(net2.arcs))
    # derive a final marking by putting one token in every sink place
    final_marking = petri.petrinet.Marking()
    for place in net1.places:
        if not place.out_arcs:
            final_marking[place] = 1
    aligned_traces = token_replay.apply_log(log1, net1, marking1,
                                            final_marking)
    # smoke assertion: the replay completed without raising
    self.assertEqual(aligned_traces, aligned_traces)
def execute_script():
    """Discover a stochastic Petri net from the running-example log,
    build its tangible reachability graph, and run CTMC transient and
    steady-state analyses, printing the results."""
    log_path = os.path.join("..", "tests", "input_data",
                            "running-example.xes")
    event_log = xes_importer.apply(log_path)
    # obtain Petri net through Alpha Miner
    net, initial_marking, final_marking = alpha_miner.apply(event_log)
    # obtain stochastic information for transitions in the model
    s_map = stochastic_map.get_map_from_log_and_net(
        event_log, net, initial_marking, final_marking,
        force_distribution="EXPONENTIAL")
    # export the current stochastic Petri net
    petri_exporter.export_net(net, initial_marking, "example.pnml",
                              final_marking=final_marking,
                              stochastic_map=s_map)
    # re-import the current stochastic Petri net from file
    net, initial_marking, final_marking, s_map = petri_importer.import_net(
        "example.pnml", return_stochastic_information=True)
    # remove temporary file
    os.remove("example.pnml")
    # reachability graph of the net, then its tangible part using the
    # stochastic map
    reach_graph = construct_reachability_graph(net, initial_marking)
    tang_reach_graph = \
        tangible_reachability.get_tangible_reachability_from_reachability(
            reach_graph, s_map)
    # visualize the tangible reachability graph on the screen
    viz = ts_vis_factory.apply(tang_reach_graph,
                               parameters={"format": "svg",
                                           "show_labels": True,
                                           "show_names": True})
    ts_vis_factory.view(viz)
    # gets the Q matrix assuming exponential distributions
    q_matrix = ctmc.get_q_matrix_from_tangible_exponential(
        tang_reach_graph, s_map)
    # pick the first state (ordered by name) to start from
    states = sorted(list(tang_reach_graph.states), key=lambda x: x.name)
    state = states[0]
    print("\n\nstarting from state = ", state.name)
    # transient analysis after 1, 10 and 100 days (in seconds)
    for label, seconds in (("1 day", 86400), ("10 days", 864000),
                           ("100 days", 8640000)):
        transient_result = \
            ctmc.transient_analysis_from_tangible_q_matrix_and_single_state(
                tang_reach_graph, q_matrix, state, seconds)
        print("\nprobability for each state after " + label + " = ",
              transient_result)
    steady_state = ctmc.steadystate_analysis_from_tangible_q_matrix(
        tang_reach_graph, q_matrix)
    print("\nsteady state = ", steady_state)
def test_importingExportingStochasticNet(self):
    """Import a stochastic Petri net from PNML and export it again,
    preserving the stochastic information."""
    # to avoid static method warnings in tests, that by construction of
    # the unittest package have to be expressed in such way
    self.dummy_variable = "dummy_value"
    # BUG FIX: import_net only returns a 4th value (the stochastic map)
    # when return_stochastic_information=True is passed; without the flag
    # the 4-way unpacking fails.
    imported_petri1, marking1, fmarking1, stochastic_info1 = \
        petri_importer.import_net(
            os.path.join(INPUT_DATA_DIR, "stochastic_running_example.pnml"),
            return_stochastic_information=True)
    # BUG FIX: also pass the stochastic map to the exporter; otherwise
    # the stochastic information this test is about would be silently
    # dropped from the exported file.
    petri_exporter.export_net(
        imported_petri1, marking1,
        os.path.join(OUTPUT_DATA_DIR, "stochastic_running_example.pnml"),
        final_marking=fmarking1, stochastic_map=stochastic_info1)
    os.remove(
        os.path.join(OUTPUT_DATA_DIR, "stochastic_running_example.pnml"))
def SPAlphaMiner(context):
    """Mine a Petri net with the Alpha Miner from the input data, save a
    PNG rendering plus a PNML export, and return the output folder."""
    args = context.args
    event_log = convert_df_pm_format(args.inputData)
    net, initial_marking, final_marking = alpha_miner.apply(event_log)
    # render and persist a picture of the discovered net
    gviz = pn_vis_factory.apply(net, initial_marking, final_marking)
    png_path = os.path.join(args.outputData, "alpha-miner.png")
    pn_vis_factory.save(gviz, png_path)
    # persist the net itself in PNML format
    pnml_path = os.path.join(args.outputData, "petri_final.pnml")
    pnml_exporter.export_net(net, initial_marking, pnml_path,
                             final_marking=final_marking)
    return args.outputData
def export_file(orig_tree, activity_set):
    """
    Export petri net to pnml file

    :param orig_tree: string of the tree from the main algorithm
    :param activity_set: set of all the activities
    :return: pnml file
    """
    # turn the textual tree into a process tree, then into a Petri net
    parsed_tree = pt_util.parse(tree_for_eval(orig_tree, activity_set))
    net, initial_marking, final_marking = tree_to_petri.apply(parsed_tree)
    # file name encodes the source data file and the silhouette threshold
    base_name = config.data_file[:config.data_file.find('.')]
    file_name = "{}_{}.pnml".format(base_name, config.silhouette_threshold)
    output_file = os.path.join(config.base_directory, config.data_dir,
                               file_name)
    petri_exporter.export_net(net, initial_marking, output_file,
                              final_marking=final_marking)
def test_alphaMinerVisualizationFromXES(self):
    """Mine a net with the Alpha Miner from an XES log, export and
    visualize it, and replay the log on it (smoke test)."""
    # to avoid static method warnings in tests, that by construction of
    # the unittest package have to be expressed in such way
    self.dummy_variable = "dummy_value"
    log, net, marking, fmarking = self.obtainPetriNetThroughAlphaMiner(
        os.path.join(INPUT_DATA_DIR, "running-example.xes"))
    log = sorting.sort_timestamp(log)
    log = sampling.sample(log)
    log = index_attribute.insert_trace_index_as_event_attribute(log)
    petri_exporter.export_net(
        net, marking, os.path.join(OUTPUT_DATA_DIR, "running-example.pnml"))
    os.remove(os.path.join(OUTPUT_DATA_DIR, "running-example.pnml"))
    gviz = pn_viz.graphviz_visualization(net)
    self.assertEqual(gviz, gviz)
    # BUG FIX: the original rebuilt a `final_marking` from the sink places
    # of the net but never used it — the replay below already receives
    # `fmarking` from the miner. The dead computation has been removed.
    aligned_traces = token_replay.apply_log(log, net, marking, fmarking)
    # smoke assertion: the replay completed without raising
    self.assertEqual(aligned_traces, aligned_traces)
print("loaded log") activity_key = "concept:name" if classifier_key is not None: activity_key = classifier_key parameters_discovery = { pmutil.constants.PARAMETER_CONSTANT_ACTIVITY_KEY: activity_key, pmutil.constants.PARAMETER_CONSTANT_ATTRIBUTE_KEY: activity_key } t1 = time.time() alpha_model, alpha_initial_marking, alpha_final_marking = alpha.apply( log, parameters=parameters_discovery) pnml_exporter.export_net(alpha_model, alpha_initial_marking, os.path.join( pnmlFolder, logNamePrefix + "_alpha.pnml"), final_marking=alpha_final_marking) t2 = time.time() print("time interlapsed for calculating Alpha Model", (t2 - t1)) print( "alpha is_sound_wfnet", check_soundness.check_petri_wfnet_and_soundness(alpha_model, debug=True)) t1 = time.time() heu_model, heu_initial_marking, heu_final_marking = heuristics_miner.apply( log, parameters=parameters_discovery) pnml_exporter.export_net(heu_model, heu_initial_marking, os.path.join(
# NOTE(review): this is a fragment of a larger script — `logFolder`,
# `logName` and `logNamePrefix` are defined outside the visible chunk
# (presumably this runs inside a per-log-file loop; confirm against the
# full file).
logPath = os.path.join(logFolder, logName)
# import the XES log and try to detect an activity classifier attribute
log = xes_factory.import_log(logPath, variant="iterparse")
log, classifier_key = insert_classifier.search_act_class_attr(log)
print("loaded log")
# fall back to the standard concept:name key when no classifier is found
activity_key = "concept:name"
if classifier_key is not None:
    activity_key = classifier_key
parameters_discovery = {pmutil.constants.PARAMETER_CONSTANT_ACTIVITY_KEY: activity_key,
                        pmutil.constants.PARAMETER_CONSTANT_ATTRIBUTE_KEY: activity_key}
# time and run Alpha Miner discovery, exporting the model to PNML
t1 = time.time()
alpha_model, alpha_initial_marking, alpha_final_marking = alpha.apply(log, parameters=parameters_discovery)
pnml_exporter.export_net(alpha_model, alpha_initial_marking,
                         os.path.join(pnmlFolder, logNamePrefix + "_alpha.pnml"))
t2 = time.time()
print("time interlapsed for calculating Alpha Model", (t2 - t1))
# time and run Inductive Miner discovery, exporting the model to PNML
t1 = time.time()
inductive_model, inductive_im, inductive_fm = inductive.apply(log, parameters=parameters_discovery)
pnml_exporter.export_net(inductive_model, inductive_im,
                         os.path.join(pnmlFolder, logNamePrefix + "_inductive.pnml"))
t2 = time.time()
print("time interlapsed for calculating Inductive Model", (t2 - t1))
# visualization parameters reuse the discovery keys and request PNG output
parameters = {pmutil.constants.PARAMETER_CONSTANT_ACTIVITY_KEY: activity_key,
              pmutil.constants.PARAMETER_CONSTANT_ATTRIBUTE_KEY: activity_key,
              "format": "png"}
alpha_vis = petri_vis_factory.apply(alpha_model, alpha_initial_marking, alpha_final_marking,
                                    log=log, parameters=parameters, variant="frequency")
# Output folders for the exported models and their renderings.
pnmlFolder = "simple_pnml"
pngFolder = "simple_png"
# NOTE(review): `logFolder` is defined outside the visible chunk, and the
# original indentation was lost — the statements below are assumed to sit
# inside the `if "." in logName:` guard (skipping extension-less entries);
# confirm against the full file.
for logName in os.listdir(logFolder):
    if "." in logName:
        logNamePrefix = logName.split(".")[0]
        print("\nelaborating " + logName)
        logPath = os.path.join(logFolder, logName)
        log = xes_factory.import_log(logPath, variant="iterparse")
        # discover a simple model (Petri net + markings) from the log
        net, initial_marking, final_marking = simple_model_factory.apply(
            log, classic_output=True)
        pnml_exporter.export_net(net, initial_marking,
                                 os.path.join(pnmlFolder, logNamePrefix) + ".pnml",
                                 final_marking=final_marking)
        # frequency-annotated visualization saved as PNG
        gviz = petri_vis_factory.apply(net, initial_marking, final_marking,
                                       log=log, variant="frequency")
        petri_vis_factory.save(
            gviz, os.path.join(pngFolder, logNamePrefix) + ".png")
# NOTE(review): fragment of a net-construction script — `net`, `t_1`,
# `t_2`, `p_1`, `source` and `sink` are created before the visible chunk;
# confirm against the full file.
net.transitions.add(t_1)
net.transitions.add(t_2)
# Add arcs wiring source -> t_1 -> p_1 -> t_2 -> sink
from pm4py.objects.petri import utils
utils.add_arc_from_to(source, t_1, net)
utils.add_arc_from_to(t_1, p_1, net)
utils.add_arc_from_to(p_1, t_2, net)
utils.add_arc_from_to(t_2, sink, net)
# Adding tokens: one token in the source (initial) and sink (final) place
initial_marking = Marking()
initial_marking[source] = 1
final_marking = Marking()
final_marking[sink] = 1
# export the hand-built net to PNML
from pm4py.objects.petri.exporter import pnml as pnml_exporter
pnml_exporter.export_net(net, initial_marking, "createdPetriNet1.pnml",
                         final_marking=final_marking)
# render the net as a PNG image
from pm4py.visualization.petrinet import factory as pn_vis_factory
parameters = {"format": "png"}
gviz = pn_vis_factory.apply(net, initial_marking, final_marking,
                            parameters=parameters)
pn_vis_factory.save(gviz, "alpha_invoice.png")
# alternative SVG rendering, kept disabled:
# from pm4py.visualization.petrinet import factory as pn_vis_factory
# parameters = {"format":"svg"}
# gviz = pn_vis_factory.apply(net, initial_marking, final_marking, parameters=parameters)
# pn_vis_factory.save(gviz, "alpha.svg")
from pm4py.objects.log.importer.xes import factory as xes_import_factory from pm4py.objects.petri.exporter import pnml as pnml_exporter from pm4py.objects.petri.importer import pnml as pnml_importer from pm4py.algo.discovery.heuristics import factory as heuristics_miner from pydream.LogWrapper import LogWrapper from pydream.EnhancedPN import EnhancedPN from pydream.predictive.nap.NAP import NAP from pydream.util.TimedStateSamples import loadTimedStateSamples if __name__ == "__main__": log = xes_import_factory.apply('YOUR_EVENTLOG.xes') net, im, fm = heuristics_miner.apply( log, parameters={"dependency_thresh": 0.99}) pnml_exporter.export_net(net, im, "discovered_pn.pnml") net, initial_marking, final_marking = pnml_importer.import_net( "discovered_pn.pnml") log_wrapper = LogWrapper(log) enhanced_pn = EnhancedPN(net, initial_marking) enhanced_pn.enhance(log_wrapper) enhanced_pn.saveToFile("enhanced_discovered_pn.json") enhanced_pn = EnhancedPN(net, initial_marking, decay_function_file="enhanced_discovered_pn.json") tss_json, tss_objs = enhanced_pn.decay_replay(log_wrapper=log_wrapper) with open("timedstatesamples.json", 'w') as fp: