    def test_importExportCSVtoCSV(self):
        # import a CSV event log, enrich it, and convert it into a trace log
        eventLog = csv_importer.import_from_path(os.path.join(INPUT_DATA_DIR, "running-example.csv"))
        eventLog.sort()
        eventLog.sample()
        eventLog.insert_event_index_as_event_attribute()
        traceLog = log_transform.transform_event_log_to_trace_log(eventLog)
        traceLog.sort()
        traceLog.sample()
        traceLog.insert_trace_index_as_event_attribute()
        # convert back to an event log, export it to CSV, and re-import the exported file
        eventLogTransformed = log_transform.transform_trace_log_to_event_log(traceLog)
        csv_exporter.export_log(eventLogTransformed, os.path.join(OUTPUT_DATA_DIR, "running-example-exported.csv"))
        eventLogImportedAfterExport = csv_importer.import_from_path(os.path.join(OUTPUT_DATA_DIR, "running-example-exported.csv"))
        traceLogImportedAfterExport = log_transform.transform_event_log_to_trace_log(eventLogImportedAfterExport)
        # the round trip must preserve the number of traces
        self.assertEqual(len(traceLog), len(traceLogImportedAfterExport))
        os.remove(os.path.join(OUTPUT_DATA_DIR, "running-example-exported.csv"))
    def obtainPetriNetThroughAlphaMiner(self, logName):
        # import the log (XES or CSV) and discover a Petri net with the Alpha Miner
        if ".xes" in logName:
            traceLog = xes_importer.import_from_file_xes(logName)
        else:
            eventLog = csv_importer.import_from_path(logName)
            traceLog = log_transform.transform_event_log_to_trace_log(eventLog)
        net, marking, fmarking = alpha_factory.apply(traceLog)
        return traceLog, net, marking, fmarking
    def test_importExportXEStoCSV(self):
        # import an XES trace log, export it as CSV, then re-import the CSV
        traceLog = xes_importer.import_from_file_xes(os.path.join(INPUT_DATA_DIR, "running-example.xes"))
        eventLog = log_transform.transform_trace_log_to_event_log(traceLog)
        csv_exporter.export_log(eventLog, os.path.join(OUTPUT_DATA_DIR, "running-example-exported.csv"))
        eventLogImportedAfterExport = csv_importer.import_from_path(os.path.join(OUTPUT_DATA_DIR, "running-example-exported.csv"))
        traceLogImportedAfterExport = log_transform.transform_event_log_to_trace_log(eventLogImportedAfterExport)
        # the round trip must preserve the number of traces
        self.assertEqual(len(traceLog), len(traceLogImportedAfterExport))
        os.remove(os.path.join(OUTPUT_DATA_DIR, "running-example-exported.csv"))
    def obtainPetriNetThroughImdf(self, logName):
        # import the log (XES or CSV) and discover a Petri net with Inductive Miner - directly follows
        if ".xes" in logName:
            traceLog = xes_importer.import_from_file_xes(logName)
        else:
            eventLog = csv_importer.import_from_path(logName)
            traceLog = log_transform.transform_event_log_to_trace_log(eventLog)
        imdf = InductMinDirFollows()
        net, marking, final_marking = imdf.apply(traceLog, None)
        return traceLog, net, marking, final_marking
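    # A minimal sketch (not part of the original suite) of how the two discovery
    # helpers above could be exercised from a test method. The test name, the
    # assertions, and the assumption that the discovered Petri net exposes
    # `places` and `transitions` collections are illustrative only.
    def test_discoveryHelpersOnRunningExample(self):
        for logName in ["running-example.xes", "running-example.csv"]:
            logPath = os.path.join(INPUT_DATA_DIR, logName)
            traceLog, net, marking, fmarking = self.obtainPetriNetThroughAlphaMiner(logPath)
            traceLog2, net2, marking2, fmarking2 = self.obtainPetriNetThroughImdf(logPath)
            # both helpers read the same log, so the trace counts should match
            self.assertEqual(len(traceLog), len(traceLog2))
            # assumed Petri net interface: discovered nets contain places and transitions
            self.assertGreater(len(net.places), 0)
            self.assertGreater(len(net2.transitions), 0)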
def load_logs():
    """
    If enabled, load logs in the folder
    """
    if shared.config["logFolder"]["loadLogsAutomatically"]:
        shared.sem.acquire()
        # loading logs
        logsFolderPath = shared.config["logFolder"]["logFolderPath"]
        folderContent = os.listdir(logsFolderPath)
        for file in folderContent:
            fullPath = os.path.join(logsFolderPath, file)
            try:
                if os.path.isfile(fullPath):
                    logName = file.split(".")[0]
                    logExtension = file.split(".")[-1]
                    if logName not in shared.trace_logs:
                        if logExtension == "xes":
                            # load XES files
                            shared.trace_logs[logName] = xes_importer.import_from_file_xes(fullPath)
                            shared.trace_logs[logName].sort()
                            shared.trace_logs[logName].insert_trace_index_as_event_attribute()
                        elif logExtension == "csv":
                            # load CSV files
                            event_log = csv_importer.import_from_path(fullPath)
                            shared.trace_logs[logName] = transform.transform_event_log_to_trace_log(event_log)
                            shared.trace_logs[logName].sort()
                            shared.trace_logs[logName].insert_trace_index_as_event_attribute()
            except Exception as e:
                # manage exception
                logging.error("exception loading log: " + str(file) + ": " + str(e))
                logging.error("traceback: " + traceback.format_exc())
        shared.sem.release()
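# A minimal sketch of the configuration shape that load_logs() reads, assuming
# shared.config is a plain nested dict. The real project may populate it from a
# configuration file; the folder path below is a placeholder.
EXAMPLE_SHARED_CONFIG = {
    "logFolder": {
        "loadLogsAutomatically": True,  # when False, load_logs() does nothing
        "logFolderPath": "logs",        # folder scanned for .xes / .csv files
    }
}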
    def test_csv1documentation(self):
        import os
        from pm4py.log.importer import csv as csv_importer

        # import a CSV event log and iterate over its events
        event_log = csv_importer.import_from_path("inputData\\running-example.csv", sep=",")
        event_log_length = len(event_log)
        # print(event_log_length)
        for event in event_log:
            # print(event)
            pass

        # group events into traces using the case identifier as glue
        from pm4py.log import transform
        trace_log = transform.transform_event_log_to_trace_log(event_log, case_glue="case:concept:name")

        # alternative route: import as a dataframe, then convert to an event log
        from pm4py.log.importer import csv as csv_importer
        from pm4py.log import transform
        dataframe = csv_importer.import_dataframe_from_path("inputData\\running-example.csv", sep=",")
        event_log = csv_importer.convert_dataframe_to_event_log(dataframe)
        trace_log = transform.transform_event_log_to_trace_log(event_log, case_glue="case:concept:name")

        # export both the event log and the trace log to CSV, then clean up
        from pm4py.log.exporter import csv as csv_exporter
        csv_exporter.export_log(event_log, "outputFile1.csv")
        os.remove("outputFile1.csv")
        from pm4py.log.exporter import csv as csv_exporter
        csv_exporter.export_log(trace_log, "outputFile2.csv")
        os.remove("outputFile2.csv")
import os
import time

from pm4py.log.importer import xes as xes_importer
from pm4py.algo.dfg.versions import native as dfg_instance
from pm4py.algo.causal import factory as causal_instance
from pm4py.algo.alpha.versions import classic as alpha_classic
from pm4py.log.importer import csv as csv_importer
from pm4py.models.petri import visualize as pn_viz
from pm4py.log import transform as transformer
from tests.constants import INPUT_DATA_DIR

# NOTE: log_instance (providing the Event / EventLog classes) also needs to be
# imported; the exact module path depends on the pm4py version in use.

event = log_instance.Event({'concept:name': 'a'})
event['test'] = 'test'

# csv
start = time.time()
print(os.path.join(INPUT_DATA_DIR, "running-example.csv"))
log = csv_importer.import_from_path(os.path.join(INPUT_DATA_DIR, "running-example.csv"), ";")
print(time.time() - start)
print(len(log))
print(log)
trace_log = transformer.transform_event_log_to_trace_log(log, case_glue='Case ID')

'''
start = time.time()
log2 = log_instance.EventLog(log.attributes, filter(lambda e: e['amount'] > 50.0, log))
print(time.time() - start)
print(len(log2))
'''


def compare_time_of_first_event(t1, t2):
    # compare two traces by the timestamp of their first event
    return (t1[0]['time:timestamp'] - t2[0]['time:timestamp']).microseconds
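# A possible use of compare_time_of_first_event (not part of the original script):
# functools.cmp_to_key adapts the two-argument comparator into a key function so
# the traces of trace_log can be sorted with it. Note that the comparator, as
# written above, only considers the microseconds component of the timestamp
# difference, so this is illustrative rather than a full chronological sort.
import functools

sorted_traces = sorted(trace_log, key=functools.cmp_to_key(compare_time_of_first_event))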