Example #1
	def test_importExportCSVtoCSV(self):
		eventLog = csv_importer.import_from_path(os.path.join(INPUT_DATA_DIR,"running-example.csv"))
		eventLog.sort()
		eventLog.sample()
		eventLog.insert_event_index_as_event_attribute()
		traceLog = log_transform.transform_event_log_to_trace_log(eventLog)
		traceLog.sort()
		traceLog.sample()
		traceLog.insert_trace_index_as_event_attribute()
		eventLogTransformed = log_transform.transform_trace_log_to_event_log(traceLog)
		csv_exporter.export_log(eventLogTransformed, os.path.join(OUTPUT_DATA_DIR,"running-example-exported.csv"))
		eventLogImportedAfterExport = csv_importer.import_from_path(os.path.join(OUTPUT_DATA_DIR,"running-example-exported.csv"))
		traceLogImportedAfterExport = log_transform.transform_event_log_to_trace_log(eventLogImportedAfterExport)
		self.assertEqual(len(traceLog), len(traceLogImportedAfterExport))
		os.remove(os.path.join(OUTPUT_DATA_DIR,"running-example-exported.csv"))
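
Stripped of the unittest harness, the CSV round trip exercised above reduces to the following chain (a minimal sketch using only calls shown in these examples; the file paths are placeholders):

from pm4py.log.importer import csv as csv_importer
from pm4py.log.exporter import csv as csv_exporter
from pm4py.log import transform as log_transform

# import a flat event log from CSV and group its events into traces
event_log = csv_importer.import_from_path("running-example.csv")
trace_log = log_transform.transform_event_log_to_trace_log(event_log)

# flatten back to an event log, export it, and re-import to close the loop
csv_exporter.export_log(log_transform.transform_trace_log_to_event_log(trace_log),
                        "running-example-exported.csv")
reimported = csv_importer.import_from_path("running-example-exported.csv")
assert len(trace_log) == len(log_transform.transform_event_log_to_trace_log(reimported))
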
Example #2
def export_log_tree(log):
    """
    Get XES log XML tree from a PM4Py trace log

    Parameters
    -----------
    log
        PM4Py trace log

    Returns
    -----------
    tree
        XML tree
    """
    # If the log is in log_instance.EventLog, then transform it into log_instance.TraceLog format
    if type(log) is log_instance.EventLog:
        log = log_transform.transform_event_log_to_trace_log(log)
    root = etree.Element(xes_util.TAG_LOG)

    # add attributes at the log level
    export_attributes(log, root)
    # add extensions at the log level
    export_extensions(log, root)
    # add globals at the log level
    export_globals(log, root)
    # add classifiers at the log level
    export_classifiers(log, root)
    # add traces at the log level
    export_traces(log, root)

    tree = etree.ElementTree(root)

    return tree
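
A hedged usage sketch for the function above: trace_log stands for any PM4Py trace log, and the serialization assumes etree is lxml.etree (drop pretty_print when using the standard-library ElementTree):

# hypothetical usage: build the XES XML tree and write it to disk
tree = export_log_tree(trace_log)
tree.write("running-example-exported.xes", xml_declaration=True, encoding="utf-8",
           pretty_print=True)
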
Example #3
    def obtainPetriNetThroughAlphaMiner(self, logName):
        # import the log as a trace log: from XES directly, or from CSV via an event log
        if ".xes" in logName:
            traceLog = xes_importer.import_from_file_xes(logName)
        else:
            eventLog = csv_importer.import_from_path(logName)
            traceLog = log_transform.transform_event_log_to_trace_log(eventLog)
        # discover a Petri net (with initial and final marking) using the alpha miner
        net, marking, fmarking = alpha_factory.apply(traceLog)
        return traceLog, net, marking, fmarking
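
A hedged companion sketch showing how the helper above would typically be invoked from a test in the same class (the method name is hypothetical; INPUT_DATA_DIR comes from tests.constants as in Example #9):

    def test_alphaMinerOnXes(self):
        # hypothetical test exercising the XES branch of the helper above
        trace_log, net, marking, fmarking = self.obtainPetriNetThroughAlphaMiner(
            os.path.join(INPUT_DATA_DIR, "running-example.xes"))
        self.assertGreater(len(trace_log), 0)
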
Example #4
	def test_importExportXEStoCSV(self):
		traceLog = xes_importer.import_from_file_xes(os.path.join(INPUT_DATA_DIR, "running-example.xes"))
		eventLog = log_transform.transform_trace_log_to_event_log(traceLog)
		csv_exporter.export_log(eventLog, os.path.join(OUTPUT_DATA_DIR,"running-example-exported.csv"))
		eventLogImportedAfterExport = csv_importer.import_from_path(os.path.join(OUTPUT_DATA_DIR,"running-example-exported.csv"))
		traceLogImportedAfterExport = log_transform.transform_event_log_to_trace_log(eventLogImportedAfterExport)
		self.assertEqual(len(traceLog), len(traceLogImportedAfterExport))
		os.remove(os.path.join(OUTPUT_DATA_DIR,"running-example-exported.csv"))
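
Beyond comparing trace counts, the round trip above can also be checked on the total number of events; a small standalone sketch (assuming, as the comparator in Example #9 does, that trace logs behave as sequences of traces and traces as sequences of events):

# hypothetical extra check: the total number of events survives the XES-to-CSV round trip
assert sum(len(trace) for trace in traceLog) == \
       sum(len(trace) for trace in traceLogImportedAfterExport)
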
Example #5
    def obtainPetriNetThroughImdf(self, logName):
        # import the log as a trace log: from XES directly, or from CSV via an event log
        if ".xes" in logName:
            traceLog = xes_importer.import_from_file_xes(logName)
        else:
            eventLog = csv_importer.import_from_path(logName)
            traceLog = log_transform.transform_event_log_to_trace_log(eventLog)
        # discover a Petri net using Inductive Miner Directly-Follows
        imdf = InductMinDirFollows()
        net, marking, final_marking = imdf.apply(traceLog, None)
        return traceLog, net, marking, final_marking
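
A hedged companion sketch for the CSV branch of the helper above (the method name is hypothetical; INPUT_DATA_DIR comes from tests.constants as in Example #9):

    def test_imdfOnCsv(self):
        # hypothetical test exercising the CSV branch of the helper above
        trace_log, net, marking, final_marking = self.obtainPetriNetThroughImdf(
            os.path.join(INPUT_DATA_DIR, "running-example.csv"))
        self.assertGreater(len(trace_log), 0)
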
Example #6
    def test_prefiltering_dataframe(self):
        inputLog = os.path.join(INPUT_DATA_DIR, "running-example.csv")
        # import the CSV as a dataframe without converting timestamps yet
        dataframe = csv_importer.import_dataframe_from_path_wo_timeconversion(inputLog, sep=',')
        # prefilter the dataframe on activities, number of cases and case length
        dataframe = df_filtering.filter_df_on_activities(dataframe, activity_key="concept:name")
        dataframe = df_filtering.filter_df_on_ncases(dataframe, case_id_glue="case:concept:name")
        dataframe = df_filtering.filter_df_on_case_length(dataframe, case_id_glue="case:concept:name")
        # convert timestamps, sort by time, then build the event log and the trace log
        dataframe = csv_importer.convert_timestamp_columns_in_df(dataframe)
        dataframe = dataframe.sort_values('time:timestamp')
        eventLog = csv_importer.convert_dataframe_to_event_log(dataframe)
        traceLog = transform.transform_event_log_to_trace_log(eventLog)
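
As a hedged continuation of the test above, the prefiltered trace log can feed process discovery directly (reusing the alpha_factory alias from Example #3, which is an assumption about this file's imports):

        # hypothetical continuation inside the same test method:
        # discover a Petri net from the prefiltered trace log
        self.assertGreater(len(traceLog), 0)
        net, marking, fmarking = alpha_factory.apply(traceLog)
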
Example #7
    def test_csv1documentation(self):
        import os

        from pm4py.log.importer import csv as csv_importer

        event_log = csv_importer.import_from_path(
            "inputData\\running-example.csv", sep=",")

        event_log_length = len(event_log)
        # print(event_log_length)
        for event in event_log:
            # print(event)
            pass

        from pm4py.log import transform

        trace_log = transform.transform_event_log_to_trace_log(
            event_log, case_glue="case:concept:name")

        from pm4py.log.importer import csv as csv_importer
        from pm4py.log import transform

        dataframe = csv_importer.import_dataframe_from_path(
            "inputData\\running-example.csv", sep=",")
        event_log = csv_importer.convert_dataframe_to_event_log(dataframe)
        trace_log = transform.transform_event_log_to_trace_log(
            event_log, case_glue="case:concept:name")

        from pm4py.log.exporter import csv as csv_exporter

        csv_exporter.export_log(event_log, "outputFile1.csv")
        os.remove("outputFile1.csv")

        from pm4py.log.exporter import csv as csv_exporter

        csv_exporter.export_log(trace_log, "outputFile2.csv")
        os.remove("outputFile2.csv")
Example #8
def load_logs():
    """
    If enabled, load logs in the folder
    """
    if shared.config["logFolder"]["loadLogsAutomatically"]:
        shared.sem.acquire()
        # loading logs
        logsFolderPath = shared.config["logFolder"]["logFolderPath"]
        folderContent = os.listdir(logsFolderPath)
        for file in folderContent:
            fullPath = os.path.join(logsFolderPath, file)
            try:
                if os.path.isfile(fullPath):
                    logName = file.split(".")[0]
                    logExtension = file.split(".")[-1]
                    if logName not in shared.trace_logs:
                        if logExtension == "xes":
                            # load XES files
                            shared.trace_logs[logName] = xes_importer.import_from_file_xes(fullPath)
                            shared.trace_logs[logName].sort()
                            shared.trace_logs[logName].insert_trace_index_as_event_attribute()
                        elif logExtension == "csv":
                            # load CSV files
                            event_log = csv_importer.import_from_path(fullPath)
                            shared.trace_logs[logName] = transform.transform_event_log_to_trace_log(event_log)
                            shared.trace_logs[logName].sort()
                            shared.trace_logs[logName].insert_trace_index_as_event_attribute()
            except Exception as e:
                # manage exception
                logging.error("exception loading log: " + str(file) + ": " +
                              str(e))
                logging.error("traceback: " + traceback.format_exc())
        shared.sem.release()
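
A minimal sketch of the shared state that load_logs expects, with names taken from the function above (the real shared module of the project may initialize these differently):

import os
from threading import Semaphore

# hypothetical stand-in for the project's `shared` module state
config = {
    "logFolder": {
        "loadLogsAutomatically": True,                    # checked before scanning the folder
        "logFolderPath": os.path.join("files", "logs"),   # folder scanned for .xes / .csv files
    }
}
sem = Semaphore()   # guards concurrent access while logs are being loaded
trace_logs = {}     # trace logs keyed by file name without extension
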
Example #9
import os
import time

from pm4py.models.petri import visualize as pn_viz
from pm4py.log import transform as transformer
from pm4py.log.importer import csv as csv_importer
from tests.constants import INPUT_DATA_DIR

event = log_instance.Event({'concept:name': 'a'})
event['test'] = 'test'

# csv
start = time.time()
print(os.path.join(INPUT_DATA_DIR, "running-example.csv"))
log = csv_importer.import_from_path(
    os.path.join(INPUT_DATA_DIR, "running-example.csv"), ";")
print(time.time() - start)
print(len(log))
print(log)
trace_log = transformer.transform_event_log_to_trace_log(log,
                                                         case_glue='Case ID')
'''
start = time.time()
log2 = log_instance.EventLog(log.attributes, filter(lambda e: e['amount'] > 50.0, log))
print(time.time() - start)
print(len(log2))
'''


def compare_time_of_first_event(t1, t2):
    # signed difference (in seconds) between the timestamps of the first events of the two traces
    return (t1[0]['time:timestamp'] - t2[0]['time:timestamp']).total_seconds()
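
Because the function follows the classic two-argument comparator convention, it can order traces via functools.cmp_to_key; a sketch reusing the trace_log built above:

import functools

# hypothetical usage: sort traces by the timestamp of their first event
traces_by_start = sorted(trace_log, key=functools.cmp_to_key(compare_time_of_first_event))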


# xes
'''
start = time.time()