def test_importingExportingPetri(self):
    """Round-trip a PNML file (import, export, re-import) and check equality."""
    # Dummy instance write keeps unittest from flagging this as a static method.
    self.dummy_variable = "dummy_value"

    source_path = os.path.join(INPUT_DATA_DIR, "running-example.pnml")
    target_path = os.path.join(OUTPUT_DATA_DIR, "running-example.pnml")

    net_a, im_a, fm_a = petri_importer.apply(source_path)
    # The soundness result is not asserted here; the call itself must not raise.
    check_soundness.check_petri_wfnet_and_soundness(net_a)

    petri_exporter.apply(net_a, im_a, target_path)

    net_b, im_b, fm_b = petri_importer.apply(target_path)
    check_soundness.check_petri_wfnet_and_soundness(net_b)

    def sorted_names(elements):
        # Order-independent comparison key for places/transitions.
        return sorted(x.name for x in elements)

    self.assertEqual(sorted_names(net_a.places), sorted_names(net_b.places))
    self.assertEqual(sorted_names(net_a.transitions),
                     sorted_names(net_b.transitions))
    self.assertEqual(
        sorted(a.source.name + a.target.name for a in net_a.arcs),
        sorted(a.source.name + a.target.name for a in net_b.arcs))
    self.assertEqual([p.name for p in im_a], [p.name for p in im_b])

    os.remove(target_path)
def test_importingPetriLogAlignment(self):
    """Align every trace of the running-example log against its Petri net."""
    # Dummy instance write keeps unittest from flagging this as a static method.
    self.dummy_variable = "dummy_value"

    net, initial_marking, _ = petri_importer.apply(
        os.path.join(INPUT_DATA_DIR, "running-example.pnml"))
    log = xes_importer.apply(
        os.path.join(INPUT_DATA_DIR, "running-example.xes"))

    # Derive the final marking: one token in every sink place (no out-arcs).
    final_marking = petri.petrinet.Marking()
    for place in net.places:
        if not place.out_arcs:
            final_marking[place] = 1

    for trace in log:
        alignment = align_alg.apply(
            trace, net, initial_marking, final_marking,
            variant=align_alg.VERSION_DIJKSTRA_NO_HEURISTICS)['alignment']
        # A move is fit when it is synchronous, or when it pairs ">>" with None.
        fit = all(
            move[0] == move[1] or (move[0] == ">>" and move[1] is None)
            for move in alignment)
        if not fit:
            raise Exception("should be fit")
def test_51(self):
    """Import, visualize, export and inspect the running-example Petri net."""
    import os
    from pm4py.objects.petri.importer import importer as pnml_importer
    petri_net, init_marking, fin_marking = pnml_importer.apply(
        os.path.join("input_data", "running-example.pnml"))

    from pm4py.visualization.petrinet import visualizer as pn_visualizer
    visualization = pn_visualizer.apply(petri_net, init_marking, fin_marking)

    # Export once without and once with an explicit final marking.
    from pm4py.objects.petri.exporter import exporter as pnml_exporter
    pnml_exporter.apply(petri_net, init_marking, "petri.pnml")
    pnml_exporter.apply(petri_net, init_marking, "petri_final.pnml",
                        final_marking=fin_marking)
    os.remove("petri.pnml")
    os.remove("petri_final.pnml")

    from pm4py.objects.petri import semantics
    enabled = semantics.enabled_transitions(petri_net, init_marking)

    net_places = petri_net.places
    net_transitions = petri_net.transitions
    net_arcs = petri_net.arcs
    for cur_place in net_places:
        description = "\nPLACE: " + cur_place.name
        for in_arc in cur_place.in_arcs:
            # NOTE(review): this rebinding discards the "PLACE:" prefix built
            # above — the original likely intended concatenation; the value is
            # never used, so the original behavior is kept as-is.
            description = str(in_arc.source.name) + " " + str(in_arc.source.label)
def petrinethandler(self):
    """Load the pnml file located at ``self.pathnet``.

    :return: the Petri net together with its initial and final marking
    """
    net, init_marking, fin_marking = pnml_importer.apply(self.pathnet)
    return net, init_marking, fin_marking
def test_importingPetriLogTokenReplay(self):
    """Token-based replay of the running-example log on its imported net."""
    # Dummy instance write keeps unittest from flagging this as a static method.
    self.dummy_variable = "dummy_value"

    net, initial_marking, final_marking = petri_importer.apply(
        os.path.join(INPUT_DATA_DIR, "running-example.pnml"))
    event_log = xes_importer.apply(
        os.path.join(INPUT_DATA_DIR, "running-example.xes"))

    # The replay result is discarded; the call itself must not raise.
    token_replay.apply(event_log, net, initial_marking, final_marking)
def read_petri_net(file_path):
    """
    Reads a Petri net from the .PNML format

    Parameters
    ----------------
    file_path
        Path of the .PNML file to load

    Returns
    ----------------
    petri_net
        Petri net object
    initial_marking
        Initial marking
    final_marking
        Final marking
    """
    # Import lazily so merely importing this module does not require pm4py.
    from pm4py.objects.petri.importer import importer as pnml_importer
    petri_net, init_marking, fin_marking = pnml_importer.apply(file_path)
    return petri_net, init_marking, fin_marking
def read_petri_net(file_path: str) -> Tuple[PetriNet, Marking, Marking]:
    """
    Reads a Petri net from the .PNML format

    .. deprecated::
        use ``read_pnml`` instead.

    Parameters
    ----------------
    file_path
        File path

    Returns
    ----------------
    petri_net
        Petri net object
    initial_marking
        Initial marking
    final_marking
        Final marking
    """
    # BUG FIX: the docstring used to sit *after* the warnings.warn call, which
    # made it a no-op string expression (so help()/__doc__ were empty). Per
    # PEP 257 the docstring must be the first statement of the function.
    warnings.warn('read_petri_net is deprecated, use read_pnml instead',
                  DeprecationWarning)
    from pm4py.objects.petri.importer import importer as pnml_importer
    net, im, fm = pnml_importer.apply(file_path)
    return net, im, fm
def execute_script():
    """Compute alignments for the running example and pretty-print them."""
    log_path = os.path.join("..", "tests", "input_data", "running-example.xes")
    pnml_path = os.path.join("..", "tests", "input_data",
                             "running-example.pnml")
    event_log = xes_importer.apply(log_path)
    net, im, fm = petri_importer.apply(pnml_path)

    # Build custom cost functions: visible transitions are expensive model
    # moves (1000) with free synchronous moves (0); silent transitions cost 1.
    # NOTE(review): these dicts are never passed to the alignment call below,
    # so they currently have no effect — original behavior kept as-is.
    model_cost_function = {}
    sync_cost_function = {}
    for transition in net.transitions:
        if transition.label is None:
            model_cost_function[transition] = 1
        else:
            model_cost_function[transition] = 1000
            sync_cost_function[transition] = 0

    alignments = ali.algorithm.apply(event_log, net, im, fm)
    print(alignments)
    pretty_print_alignments(alignments)
def get_partial_models(directory):
    """Import every PNML file found directly inside *directory*.

    :return: list of imported partial models, one per regular file
    """
    models = []
    for entry in listdir(directory):
        full_path = join(directory, entry)
        if isfile(full_path):
            models.append(pnml_importer.apply(full_path))
    return models
from pm4py.objects.petri.exporter import exporter as pnml_exporter
from pm4py.objects.petri.importer import importer as pnml_importer
from pm4py.algo.discovery.inductive import algorithm as inductive_miner
from pydream.LogWrapper import LogWrapper
from pydream.EnhancedPN import EnhancedPN
from pydream.predictive.nap.NAP import NAP

if __name__ == "__main__":
    # Discover a Petri net from the toy log with the inductive miner.
    # NOTE(review): xes_importer and json are assumed to be imported earlier
    # in the file — confirm.
    log = xes_importer.apply('sample_data\\toy.xes')
    net, im, fm = inductive_miner.apply(log)
    # Round-trip the discovered net through PNML on disk.
    pnml_exporter.apply(net, im, "sample_data\\discovered_pn.pnml", fm)
    net, initial_marking, final_marking = pnml_importer.apply(
        "sample_data\\discovered_pn.pnml")
    log_wrapper = LogWrapper(log)
    # Enhance the reloaded net from the wrapped log, then persist the
    # enhancement to JSON.
    enhanced_pn = EnhancedPN(net, initial_marking)
    enhanced_pn.enhance(log_wrapper)
    enhanced_pn.saveToFile("sample_data\\enhanced_discovered_pn.json")
    # Reload the enhanced net from that JSON and produce timed state samples
    # via decay replay.
    enhanced_pn = EnhancedPN(
        net, initial_marking,
        decay_function_file="sample_data\\enhanced_discovered_pn.json")
    tss_json, tss_objs = enhanced_pn.decay_replay(log_wrapper=log_wrapper)
    with open("sample_data\\timedstatesamples.json", 'w') as fp:
        json.dump(tss_json, fp)
def run_verifier(file):
    """Check easy soundness and count place cycles for a PNML file.

    :param file: path of the PNML file to verify
    :return: dict with keys ``soundness`` (bool) and ``cycles`` (int)
    """
    net, init_marking, fin_marking = petri_importer.apply(file)
    place_cycles = utils.get_cycles_petri_net_places(net)
    is_sound = check_easy_soundness_net_in_fin_marking(
        net, init_marking, fin_marking)
    return {"soundness": is_sound, "cycles": len(place_cycles)}
import os
from pm4py.objects.log.importer.xes import importer as xes_importer

# Path to the reference Petri net model (PNML format).
pnmlfile="D://process mining//HMM//pnml//reference//reference 7.pnml"
import os
from pm4py.objects.petri.importer import importer as pnml_importer

# Import the net with its initial and final marking.
# NOTE(review): os.path.join with a single argument is a no-op here.
net, initial_marking, final_marking = pnml_importer.apply(os.path.join(pnmlfile))

# Render the imported net on screen.
from pm4py.visualization.petrinet import visualizer as pn_visualizer
gviz = pn_visualizer.apply(net, initial_marking, final_marking)
pn_visualizer.view(gviz)
from pm4py.objects.log.importer.xes import importer as xes_importer
from pm4py.objects.petri.importer import importer as pnml_importer
from pm4py.evaluation.replay_fitness import evaluator as replay_evaluator
from pm4py.evaluation.precision import evaluator as precision_evaluator
from pm4py.evaluation.generalization import evaluator as generalization_evaluator
from pm4py.evaluation.simplicity import evaluator as simplicity_evaluator

# Event log
log = xes_importer.apply("../patterns_file/BPI2017Denied" + '.xes')

# Identifiers of the repaired sub-models to evaluate.
sub = ['3', '4', '15', '65', '92']
for s in sub:
    # Net model: import the repaired/adjusted net for this sub-model.
    net, initial_marking, final_marking = pnml_importer.apply(
        '../patterns_file/reti_Fahland/repaired_' + s + '_adjusted.pnml')
    print("\nValutazione rete sub_" + s + ":")
    # Alignment-based replay fitness of the full log on this sub-model.
    fitness = replay_evaluator.apply(
        log, net, initial_marking, final_marking,
        variant=replay_evaluator.Variants.ALIGNMENT_BASED)
    print("Fitness: ", fitness)
    precision = precision_evaluator.apply(
        log, net, initial_marking, final_marking,
f.write(r.content) if not os.path.exists(BPIC2017_OFFER_LOG): print("downloading: " + BPIC2017_OFFER_LOG) r = requests.get(LOG_MODEL_REPOSITORY_URL + BPIC2017_OFFER_LOG) with open(BPIC2017_OFFER_LOG, 'wb') as f: f.write(r.content) if not os.path.exists(ROADTRAFFIC_CSV_GZ): print("downloading: " + ROADTRAFFIC_CSV_GZ) r = requests.get(LOG_MODEL_REPOSITORY_URL + ROADTRAFFIC_CSV_GZ) with open(ROADTRAFFIC_CSV_GZ, 'wb') as f: f.write(r.content) a32f0n00_log = xes_importer.apply(A32F0N00_LOG) a32f0n00_net, a32f0n00_im, a32f0n00_fm = petri_importer.apply(A32F0N00_NET) T1 = [0.0, 38.03, 0.0] T2 = [0.0, 3.03, 0.0] T3 = [0.0, 3.57, 0.0] T4 = [0.0, 20.50, 0.0] T5 = [0.0, 1.15, 0.0] T6 = [0.0, 1.06, 0.0] T7 = [0.0, 2.59, 0.0] T8 = [0.0, 1.07, 0.0] T9 = [0.0, 0.97, 0.0] T10 = [0.0, 3.69, 0.0] if DEBUG: if not os.path.exists("debug.csv"): F = open("debug.csv", "w")
from pyspark import SparkContext, SparkConf
from pm4py.objects.log.importer.xes import importer as xes_importer
from pm4py.objects.petri.importer import importer as pnml_importer
import config
import os
from conformancechecking4spark.alignments import DistributedAlignmentConfiguration
from conformancechecking4spark.heuristics.algorithm import sum_of_differences
from conformancechecking4spark.utils import get_partial_models

# Directory holding the partial (decomposed) process models.
path_pms = os.path.join(config.ROOT_DIR, 'data/M2')

# Local Spark context using all available cores.
conf = SparkConf().setAppName("test").setMaster("local[*]")
sc = SparkContext(conf=conf)

log = xes_importer.apply(os.path.join(config.ROOT_DIR, 'data/M2.xes'))
net, initial_marking, final_marking = pnml_importer.apply(os.path.join(config.ROOT_DIR, 'data/M2_petri_pnml.pnml'))
nets = get_partial_models(path_pms)

# Distribute the traces; the model RDD carries the partial nets (the
# single-full-net alternative is left commented out).
log_rdd = sc.parallelize(log)
# pm_rdd = sc.parallelize([(net, initial_marking, final_marking)])
pm_rdd = sc.parallelize(nets)

# Run the distributed alignment and store the results locally.
distr_alg = DistributedAlignmentConfiguration(log_rdd, pm_rdd, 500, 1,
                                              heuristic=sum_of_differences)
distr_alg.apply().save_local(os.path.join(config.ROOT_DIR, 'data/results'))
import os
from pm4py.objects.log.importer.xes import importer as xes_importer

# NOTE(review): `filepath` is expected to be defined earlier in the file — confirm.
log = xes_importer.apply(filepath)

# Build a custom classifier by concatenating activity name and lifecycle
# transition on every event.
for trace in log:
    for event in trace:
        event["customClassifier"] = event["concept:name"] + event["lifecycle:transition"]

from pm4py.algo.discovery.alpha import algorithm as alpha_miner

# Mine with the alpha miner, using the custom classifier as activity key.
parameters = {alpha_miner.Variants.ALPHA_CLASSIC.value.Parameters.ACTIVITY_KEY: "customClassifier"}
net, initial_marking, final_marking = alpha_miner.apply(log, parameters=parameters)

# Petri net management: import, visualize and export the running example.
import os
from pm4py.objects.petri.importer import importer as pnml_importer
net, initial_marking, final_marking = pnml_importer.apply(os.path.join("tests","input_data","running-example.pnml"))
from pm4py.visualization.petrinet import visualizer as pn_visualizer
gviz = pn_visualizer.apply(net, initial_marking, final_marking)
pn_visualizer.view(gviz)
from pm4py.objects.petri.exporter import exporter as pnml_exporter
pnml_exporter.apply(net, initial_marking, "petri.pnml")
pnml_exporter.apply(net, initial_marking, "petri_final.pnml", final_marking=final_marking)

# Tree generation
from pm4py.simulation.tree_generator import simulator as tree_gen
import copy
from pm4py.objects.log.importer.xes import importer as xes_importer
from pm4py.objects.petri.importer import importer as petri_importer
from da4py.main.conformanceChecking.conformanceArtefacts import ConformanceArtefacts

'''
Observe that for this script, a slight modification of the code is requested:
- in da4py.main.conformanceChecking.conformanceArtefacts.ConformanceArtefacts
-> comment the solving of the formula which is not required
-> modify return len(wncf.hard) in both multi-alignment and exact alignment class
'''

net, m0, mf = petri_importer.apply("../../examples/medium/model2.pnml")
log = xes_importer.apply("../../examples/medium/model2bis.xes")

# CHART 1 : increasing the number of traces
multi = []
exact = []
for i in range(0,110,10):
    print(i)
    # Work on a shallow copy of the log truncated to its first i traces.
    # NOTE(review): this pokes the private _list attribute of the EventLog.
    log1 = copy.copy(log)
    log1._list = log._list[:i]
    artefacts = ConformanceArtefacts()
    artefacts.setDistance_type("edit")
    artefacts.setOptimizeSup(True)
    artefacts.setSize_of_run(10)
    artefacts.setMax_d(20)
n10+=1 if (x1_x2_in_c2 and not x1_x2_in_c1): n01+=1 print(n11,"N11, pairs are clustered together") print(n10,"N10, pairs are clustered in C1 but not C2") print(n01,"N01, pairs are clustered in C2 but not C1") return n11/(n01+n10+n11) ######################################################## # MAIN PROGRAM # ######################################################## # read the file net,m0,mf = import_pnml.apply("/Users/mboltenhagen/Documents/PhD/Josep&Thomas/markovian-accuracy/real-life-logs-models/im/2.pnml") log = import_xes.apply("/Users/mboltenhagen/Documents/PhD/Josep&Thomas/markovian-accuracy/real-life-logs-models/2.xes.gz") # prepare an empty log for the clustered traces clustered_traces = EventLog() # launch the AMSTC method print("########################################################") print("# AMSTC #") print("########################################################") clustering = samplingVariantsForAmstc(net, m0, mf, log,5,15, 0, 7, 2 ,maxCounter=1,editDistance=True,silent_label="tau", debug=1) # get a dict of my clustering, {trace:cluster number} myClustering = {} # for each cluster
from pm4py.algo.discovery.inductive import algorithm as inductive_miner
from pm4py.objects.petri.importer import importer as pnml_importer
from pm4py.visualization.petrinet import visualizer as pn_visualizer
from pm4py.algo.conformance.alignments import algorithm as alignments
import numpy as np
import matplotlib.pyplot as plt
import ruptures as rpt
import datetime
from array import *


def getTime(elem):
    # Sort key: the first element (timestamp) of the given sequence.
    return elem[0]


#variant = xes_importer.Variants.ITERPARSE
#parameters = {variant.value.Parameters.TIMESTAMP_SORT: True}
net, initial_marking, final_marking = pnml_importer.apply("helpdesk.pnml")
log = xes_importer.apply(os.path.join("helpdesk.xes"))

target_place_id = "n5"  # n5 refers to the place after "Take in charge ticket"

# Accumulators for the transitions surrounding the target place.
transitions_before_place = set()
transitions_after_place = set()
transitions_before_place_id = set()
transitions_after_place_id = set()
transitions_after_labels = {}

places = net.places
transitions = net.transitions
arcs = net.arcs
counter = 1
for p in places:
    #print(p.name)
    if p.name == target_place_id: