def case_filter_plt(request):
    """Render a timeframe plot for one selected case of the current log.

    Expects a POST with ``selected_case``; the active log must already be
    stored in the session under ``current_log``.  Returns the rendered plot
    HTML, or an error response when no log is selected / the method is wrong.
    """
    event_logs_path = os.path.join(settings.MEDIA_ROOT, "event_logs")

    # Load the log information from the session; without it we cannot plot.
    log_information = request.session.get("current_log")
    if log_information is None:
        # TODO: redirect to the log-selection page instead of erroring.
        return HttpResponse("No event log selected.", status=400)

    event_log = os.path.join(event_logs_path, log_information["log_name"])
    log_format = log_import.get_log_format(log_information["log_name"])

    # Import the log considering the given format.
    log, activities = log_import.log_import(event_log, log_format, log_information)

    if request.method != "POST":
        # Bug fix: the original only printed a debug line and implicitly
        # returned None, which Django rejects with a ValueError.
        return HttpResponse("POST required.", status=405)

    selected_case = request.POST["selected_case"]
    df = plotting.create_df_case(log, log_format, [selected_case], log_information)

    plot_div = plotting.timeframe_plot(df)
    html = loader.render_to_string("view_plot.html", {"plot_div": plot_div})
    return HttpResponse(html)
def case_filter_dfg(request):
    """Return the DFG of the log filtered to a single case, as G6 JSON data.

    Expects a POST with ``selected_case``; the active log must already be
    stored in the session under ``current_log``.
    """
    event_logs_path = os.path.join(settings.MEDIA_ROOT, "event_logs")

    log_information = request.session.get("current_log")
    if log_information is None:
        # Bug fix: without a session log the original crashed later with a
        # NameError on `log` / `this_data`.
        return JsonResponse(
            {"success": False, "responseText": "No event log selected."},
            status=400,
        )

    event_log = os.path.join(event_logs_path, log_information["log_name"])
    log_format = log_import.get_log_format(log_information["log_name"])

    # Import the log considering the given format.
    log, activities = log_import.log_import(event_log, log_format, log_information)

    if request.method != "POST":
        # Bug fix: on GET the original fell through to the response dict and
        # raised a NameError on `this_data`.
        return JsonResponse(
            {"success": False, "responseText": "POST required."},
            status=405,
        )

    selected_case = request.POST["selected_case"]

    # Keep only the traces belonging to the selected case.
    if log_format == "xes":
        filtered_log = pm4py.filter_trace_attribute_values(
            log, log_information["case_id"], [selected_case], retain=True)
    else:
        filtered_log = log[log["case:concept:name"].isin([selected_case])]

    dfg = dfg_discovery.apply(filtered_log)
    this_data, temp_file = plotting.dfg_to_g6(dfg)
    # NOTE(review): the original called re.escape(temp_file) and discarded
    # the result — a no-op, removed.

    message = {
        "success": True,
        "data": json.dumps(this_data),
        "responseText": "Inactivated successfully!",
    }
    return JsonResponse(message)
# Example #3
def group_analysis(request):
    """Render the group-analysis page.

    When a log is selected in the session, import it, publish its activity
    names to the session, and render the page with the currently active
    groups; otherwise render the page with whatever groups (if any) the
    group-management state provides.
    """
    event_logs_path = os.path.join(settings.MEDIA_ROOT, "event_logs")

    # TODO Running Example, on how to display Plot
    # Use this to include it in the UI

    # TODO Load the Log Information, else throw/redirect to Log Selection
    log_information = None
    if "current_log" in request.session and request.session["current_log"] is not None:
        log_information = request.session["current_log"]
        print("Log Information: ", log_information)

    # No log selected: render with the active groups only (if manageable).
    if log_information is None:
        active_group_details = None
        if check_group_managment(request):
            active_group_details = get_active_groups(request)
        return render(
            request,
            "group_analysis.html",
            {"active_group_details": active_group_details},
        )

    # TODO Get the Groups, from the Post
    event_log = os.path.join(event_logs_path, log_information["log_name"])
    log_format = log_import.get_log_format(log_information["log_name"])

    # Import the log; only the activity names are needed on this page.
    _, activities = log_import.log_import(event_log, log_format,
                                          log_information)

    # Publish the loaded log's activities for other views to use.
    request.session["activities"] = list(activities)
    active_group_details = get_active_groups(request)

    return render(
        request,
        "group_analysis.html",
        {
            "log_name": settings.EVENT_LOG_NAME,
            "active_group_details": active_group_details,
        },
    )
def perspective(request):
    """Render the perspective view: log statistics plus the log's DFG.

    GET renders ``perspective_view.html`` (with data when a log is loaded);
    POST is not implemented and is rejected explicitly.
    """
    event_logs_path = os.path.join(settings.MEDIA_ROOT, "event_logs")
    load_log_success = False  # renamed from misspelled `load_log_succes`

    # Load the log information from the session, if any.
    log_information = request.session.get("current_log")
    if log_information is not None:
        print(log_information)

        event_log = os.path.join(event_logs_path, log_information["log_name"])
        log_format = log_import.get_log_format(log_information["log_name"])

        # Import the log considering the given format.
        log, activities = log_import.log_import(event_log, log_format, log_information)
        load_log_success = True

    if request.method == "POST":
        # Bug fix: the original only printed "Not yet implemented" and
        # returned None, which Django rejects with a ValueError.
        return HttpResponse("Not yet implemented.", status=405)

    if not load_log_success:
        # No log selected: render the bare template.
        return render(request, "perspective_view.html")

    result = stats.get_log_statistics(log, log_format, log_information)
    dfg = dfg_discovery.apply(log)
    this_data, temp_file = plotting.dfg_to_g6(dfg)
    # NOTE(review): a discarded re.escape(temp_file) call was removed.
    result["Nunique_Activities"] = len(activities)
    return render(
        request,
        "perspective_view.html",
        {
            "log_name": settings.EVENT_LOG_NAME,
            "json_file": temp_file,
            "data": json.dumps(this_data),
            "activities": activities,
            "result": result,
        },
    )
# Example #5
def cohort_analysis_data(request):
    """Build a cohort-analysis plot from POSTed parameters and return it as HTML.

    Depending on ``operation_type`` this produces either a timeframe plot for
    one group, or a concurrency/amplitude plot for several selected groups.
    The current log and group details must already be in the session.
    """
    if request.method != "POST":
        # Bug fix: on GET the original fell through and raised a NameError
        # on `post_data` / `plot_div`; reject non-POST requests explicitly.
        return HttpResponse("POST required.", status=405)

    event_logs_path = os.path.join(settings.MEDIA_ROOT, "event_logs")
    post_data = dict(request.POST.lists())

    log_information = request.session["current_log"]

    event_log = os.path.join(event_logs_path, log_information["log_name"])
    log_format = log_import.get_log_format(log_information["log_name"])

    # Loading group details (built elsewhere in the group-management views).
    group_details = request.session["group_details"]

    # Loading the log.
    log, activities = log_import.log_import(event_log, log_format, log_information)

    # Creating the plotting data.
    df = plotting_data.create_plotting_data(log, log_format, log_information)

    # TODO: consider pickling the data for a performance boost after the
    # first load.

    if request.POST["operation_type"] == "timeframe":
        # TODO: replace this with the interval-picker values from the UI.
        start_time, end_time = tuple(request.POST["start_end_time"].split(" - "))

        group = Group(
            group_details[request.POST["selected_group_name"]]["group_name"],
            group_details[request.POST["selected_group_name"]][
                "selected_activities"
            ].split(", "),
        )

        df = plotting_data.create_timeframe_dataframe(
            df, group, start_time, end_time
        )
        plot_div = plotting.timeframe_plot_factory(df)

    else:
        # Build Group objects for every group the user selected.
        groups = [
            Group(
                group_details[name]["group_name"],
                group_details[name]["selected_activities"].split(", "),
            )
            for name in group_details
            if name in post_data["selected_group_names[]"]
        ]
        freq = request.POST["selected_time"]
        date_frame = plotting_data.create_concurrency_frame(df, groups)
        if request.POST["plot_type"] == "standard":
            plot_div = plotting.concurrency_plot_factory(
                date_frame,
                groups,
                freq=freq,
                aggregate=settings.AGGREGATE_FUNCTIONS[
                    request.POST["selected_aggregation"]
                ],
            )
        else:
            # `uniform` normalises the amplitude plot; idiom fix for the
            # original `True if ... else False`.
            uniform = request.POST["amplitude_plot_type"] == "uniform"
            plot_div = plotting.amplitude_plot_factory(date_frame, groups, uniform)

    post_data["plot_div"] = plot_div

    html = loader.render_to_string("cohort_analysis_plot.html", post_data)
    print("Finished Plot Creation")

    return HttpResponse(html)
def activity_filter(request):
    """Filter the log to cases containing a selected activity.

    Expects a POST with ``selected_activity``; returns JSON with summary
    statistics of the filtered log and its DFG as G6 data.
    """
    event_logs_path = os.path.join(settings.MEDIA_ROOT, "event_logs")

    log_information = request.session.get("current_log")
    if log_information is None:
        # Bug fix: without a session log the original crashed later with a
        # NameError on `log` / `subsetfilteredresult`.
        return JsonResponse(
            {"success": False, "responseText": "No event log selected."},
            status=400,
        )

    event_log = os.path.join(event_logs_path, log_information["log_name"])
    log_format = log_import.get_log_format(log_information["log_name"])

    # Import the log considering the given format.
    log, activities = log_import.log_import(event_log, log_format, log_information)

    if request.method != "POST":
        # Bug fix: on GET the original fell through to the response dict and
        # raised a NameError on `subsetfilteredresult` / `this_data`.
        return JsonResponse(
            {"success": False, "responseText": "POST required."},
            status=405,
        )

    selected_activity = request.POST["selected_activity"]
    result = stats.get_log_statistics(log, log_format, log_information)
    case_ids = stats.get_case_ids_by_activity(
        log, selected_activity, log_format, log_information
    )

    # Keep only the traces whose case contains the selected activity.
    if log_format == "xes":
        filtered_log = pm4py.filter_trace_attribute_values(
            log, log_information["case_id"], case_ids, retain=True)
    else:
        filtered_log = log[log["case:concept:name"].isin(case_ids)]

    filteredresult = stats.get_log_statistics(
        filtered_log, log_format, log_information
    )
    # Fall back to the unfiltered statistics if filtering produced nothing.
    if filteredresult is None:
        filteredresult = result

    dfg = dfg_discovery.apply(filtered_log)
    this_data, temp_file = plotting.dfg_to_g6(dfg)
    # NOTE(review): a discarded re.escape(temp_file) call and an unused
    # `network = {}` local were removed.

    # Only this subset of the statistics is shown in the UI.
    keys_to_extract = [
        "Nvariant",
        "Nunique_Activities",
        "Nactivities",
        "Ncase",
        "StartTime",
        "EndTime",
        "TotalDuration",
        "MedianCaseDuration",
        "MeanCaseDuration",
        "MinCaseDuration",
        "MaxCaseDuration",
    ]
    subsetfilteredresult = {
        key: str(filteredresult[key]) for key in keys_to_extract
    }

    message = {
        "success": True,
        "filtered_result": subsetfilteredresult,
        "data": json.dumps(this_data),
        "responseText": "Inactivated successfully!",
    }
    return JsonResponse(message)