def execute_script():
    """Import the receipt test log and print the discovered organizational roles."""
    # Load the event log from the test input-data directory.
    event_log = xes_importer.apply(
        os.path.join("..", "tests", "input_data", "receipt.xes"),
        variant="nonstandard")
    # Discover roles (groups of resources executing similar activities).
    discovered_roles = roles_algorithm.apply(event_log)
    # Show only the grouped activities of each role.
    print([role[0] for role in discovered_roles])
r = random.random() for i in range(len(chooselist) - 1): if chooselist[i][1] <= r and chooselist[i + 1][1] >= r: simrestrace.append(chooselist[i + 1][0]) print(event[logname], chooselist[i + 1][0]) simreslog.append(simrestrace) return simreslog tree = inductive_miner.apply_tree(log) list0 = [] notdoact(tree, list0) actrescount = getactivityresourcecount(log, list0, "concept:name", "org:group") #print(actrescount,"actrescount") roles = roles_discovery.apply( log, variant=None, parameters={rpd.Parameters.RESOURCE_KEY: "org:group"}) #rescluster = getresoucecluster(log,roles,"concept:name","org:resource") print(roles, "roles") resourcesimulation = simulateresource(log, actrescount, roles, "concept:name", "org:group") print(resourcesimulation, "resourcesimulation") #join activity but nothing different to the last segment. #ja_values = sna.apply(log, variant=sna.Variants.JOINTACTIVITIES_LOG) #gviz_ja_py = sna_visualizer.apply(ja_values, variant=sna_visualizer.Variants.PYVIS) #sna_visualizer.view(gviz_ja_py, variant=sna_visualizer.Variants.PYVIS) #print(hw_values,'~~', roles,'~~~',ja_values,"hw_values,roles,ja_values") ''' net, im, fm = inductive_miner.apply(log) gviz = visualizer.apply(net, im, fm, parameters={visualizer.Variants.WO_DECORATION.value.Parameters.DEBUG: True}) visualizer.view(gviz)
def submit(request):
    """Django view: re-import the uploaded event log, derive simulation
    defaults from it together with the submitted form values, stash them in
    module-level globals shared with the other views, and render the
    configuration page.

    Returns an HttpResponse rendering ``config.html``.
    """
    # Module-level state shared with the other views (e.g. statics()).
    # Fix: the original declared ``global Waitingtime`` twice; the duplicate
    # declaration is removed.
    global ADRESS
    global activitiescapacity
    global activitylimit
    global businesshour
    global businessday
    global stop
    global miss
    global limittime
    global starttime2
    global numtrace
    global tiex
    global capacity
    global tracelimit
    global Waitingtime
    global Actresource
    global Duration
    global Deviation
    global Frequency
    logadr = ADRESS
    # Re-import and normalise the log using the column names chosen on upload;
    # inputname[-3:] is the file extension ("xes"/"csv").
    log = infra.recieve_and_convert_log.convert_log(
        logadr, logname, logtime, logtran, logstart, logcompl, logreso, logid,
        inputname[-3:])
    ptree = infra.recieve_and_convert_log.get_processtree(log)
    # Cache the per-activity statistics in globals for later views.
    duration = infra.recieve_and_convert_log.get_duration(log)
    Duration = duration
    deviation = infra.recieve_and_convert_log.get_deviation(duration, log)
    Deviation = deviation
    waitingtime = infra.recieve_and_convert_log.waitingtime(log)
    Waitingtime = waitingtime
    # Fix: initialtrace(log) was called twice for the two tuple fields;
    # call it once and unpack.
    initial_trace = infra.recieve_and_convert_log.initialtrace(log)
    capacity = initial_trace[0]
    tracelimit = initial_trace[1]
    #capacity = request.POST.get('catr')
    #tracelimit = request.POST.get('litr')
    #Waitingtime = request.POST.get('trdu1')
    #activitiescapacity = request.POST.get('caac')
    #activitylimit = request.POST.get('liac')
    # Simulation options taken from the submitted form.
    businesshour = request.POST.get('buho')
    businessday = request.POST.get('buda')
    stop = request.POST.get('in')
    miss = 'y'
    limittime = request.POST.get('liti')
    starttime2 = request.POST.get('stti')
    numtrace = request.POST.get('geca')
    tiex = ''
    activitiescapacity = infra.recieve_and_convert_log.computecapacity(log)
    activitylimit = infra.recieve_and_convert_log.initiallimit(log)[0]
    #tracelimit = request.POST.get('trli')
    if tiex == '':
        # NOTE(review): tiex1 is only consumed by the commented-out
        # get_waitinhour call below; kept for behavioural parity.
        tiex1 = 'y'
    #frequency = infra.recieve_and_convert_log.get_waitinhour(log,Waitingtime,tiex1,Watichange)
    #Frequency = frequency
    #resourcedict = infra.recieve_and_convert_log.initialresource1(log)
    # Discover which resources perform which activities (organizational roles).
    Actresource = roles_discovery.apply(
        log, variant=None, parameters={rpd.Parameters.RESOURCE_KEY: logreso})
    #Actresource = resourcedict[1]
    #oldnumtrace = infra.recieve_and_convert_log.statics(log)[0]
    #activitylimit = []
    # Dead draft kept for reference: scale per-activity limits by the
    # requested number of traces.
    '''
    for ele in Duration:
        if numtrace == '':
            numtrace = 100
        #print(ele,ele[0],Actresource[ele[0]],numtrace,oldnumtrace)
        activitylimit.append(int(Actresource[ele[0]]*numtrace)/oldnumtrace)
    '''
    #activitiescapacity = ''
    # The limits derived above are deliberately discarded; the config template
    # expects blank fields here.
    activitylimit = ''
    activitiescapacity = infra.recieve_and_convert_log.computecapacity(log)
    tracelimit = ''
    window = 1
    return render(request, 'config.html', {
        'Waitingtime': Waitingtime,
        'window': window
    })
def statics(request):
    """Django view: collect log statistics (case counts, throughput, roles,
    handover) and generate KDE plot images, then render ``statics.html``.

    Relies on the module-level globals (Duration, Deviation, Waitingtime, …)
    populated by submit().
    """
    logadr = ADRESS
    log = infra.recieve_and_convert_log.convert_log(
        logadr, logname, logtime, logtran, logstart, logcompl, logreso, logid,
        inputname[-3:])
    ptree = infra.recieve_and_convert_log.get_processtree(log)
    # Reuse the values cached in globals instead of recomputing them.
    #duration = infra.recieve_and_convert_log.get_duration(log)
    duration = Duration
    #deviation = infra.recieve_and_convert_log.get_deviation(duration,log)
    deviation = Deviation
    #waitingtime = infra.recieve_and_convert_log.waitingtime(log)
    waitingtime = Waitingtime
    #frequency = infra.recieve_and_convert_log.get_waitinhour(log,Waitingtime,'n',Watichange)
    #frequency = Frequency
    # Worked seconds between two fixed timestamps (business-hours example).
    st = datetime.fromtimestamp(100000000)
    et = datetime.fromtimestamp(200000000)
    bh_object = BusinessHours(st, et)
    worked_time = bh_object.getseconds()
    #log_path = os.path.join("tests","input_data","receipt.xes")
    initialtrace = infra.recieve_and_convert_log.initialtrace(log)
    # KDE of case durations, plotted on linear and semilog-x axes.
    x, y = case_statistics.get_kde_caseduration(
        log,
        parameters={
            constants.PARAMETER_CONSTANT_TIMESTAMP_KEY: "time:timestamp"
        })
    gviz1 = graphs_visualizer.apply_plot(
        x, y, variant=graphs_visualizer.Variants.CASES)
    gviz2 = graphs_visualizer.apply_semilogx(
        x, y, variant=graphs_visualizer.Variants.CASES)
    graphs_visualizer.save(gviz1, "DES1/static/image1.gv.png")
    graphs_visualizer.save(gviz2, "DES1/static/image2.gv.png")
    # KDE of event dates.
    x, y = attributes_filter.get_kde_date_attribute(log, attribute="time:timestamp")
    gviz3 = graphs_visualizer.apply_plot(
        x, y, variant=graphs_visualizer.Variants.DATES)
    graphs_visualizer.save(gviz3, "DES1/static/image3.gv.png")
    '''
    x, y = attributes_filter.get_kde_numeric_attribute(log, "amount")
    gviz4 = graphs_visualizer.apply_plot(x, y, variant=graphs_visualizer.Variants.ATTRIBUTES)
    gviz5 = graphs_visualizer.apply_semilogx(x, y, variant=graphs_visualizer.Variants.ATTRIBUTES)
    graphs_visualizer.save(gviz4,"./static/image4.gv.png")
    graphs_visualizer.save(gviz5,"./static/image5.gv.png")
    '''
    # Fix: statics(log) is expensive and was recomputed seven times, once per
    # field -- call it once and index the result.
    log_stats = infra.recieve_and_convert_log.statics(log)
    numtrace = log_stats[0]
    numactivity = log_stats[1]
    activitylist = log_stats[2]
    timeinterval = log_stats[3]
    meanthoughputtime = log_stats[4][0]
    deviationthoughputtime = log_stats[4][1]
    arrivalratio = log_stats[5]
    dispersionratio = log_stats[6]
    resourcedict = infra.recieve_and_convert_log.initialresource1(log)
    initialcapacity = infra.recieve_and_convert_log.computecapacity(log)
    initiallimit = infra.recieve_and_convert_log.initiallimit(log)[0]
    # Pair each activity's capacity with its limit: (activity, capacity, limit).
    initialcaplim = []
    for i in range(len(initialcapacity)):
        initialcaplim.append(
            (initialcapacity[i][0], initialcapacity[i][1], initiallimit[i][1]))
    #print(intialcapacity,"line 205")
    Actresource = roles_discovery.apply(
        log, variant=None, parameters={rpd.Parameters.RESOURCE_KEY: logreso})
    list0 = []
    infra.recieve_and_convert_log.notdoact(ptree, list0)
    handover = infra.recieve_and_convert_log.getactivityresourcecount(
        log, list0, logname, logreso)[1]
    # Round values for display. NOTE(review): duration/deviation alias the
    # module-level Duration/Deviation lists, so these loops mutate the shared
    # globals in place -- confirm that this is intended.
    for i, x in enumerate(duration):
        duration[i] = (x[0], round(x[1], 2), round(deviation[i][1], 2))
    for i, x in enumerate(deviation):
        deviation[i] = (x[0], round(x[1], 2))
    context = {'log': log, 'ptree': ptree, 'duration': duration, 'deviation': deviation,
               'worked_time': worked_time, 'numtrace': numtrace, 'numactivity': numactivity,
               'activitylist': activitylist, 'timeinterval': timeinterval,
               'meanthoughputtime': meanthoughputtime,
               'deviationthoughputtime': deviationthoughputtime,
               'arrivalratio': arrivalratio, 'dispersionratio': dispersionratio,
               'resourcedict': Actresource, 'handover': handover,
               "initialcaplim": initialcaplim, 'initialtrace': initialtrace}
    return render(request, 'statics.html', context)
def test_role_receipt_xes(self):
    """Smoke test: role mining on the receipt XES log runs without errors."""
    log_path = os.path.join("..", "tests", "input_data", "receipt.xes")
    imported_log = xes_importer.apply(log_path)
    discovered = role_mining.apply(imported_log)
def test_role_receipt_csv(self):
    """Smoke test: role mining on the receipt CSV log runs without errors."""
    csv_path = os.path.join("input_data", "receipt.csv")
    frame = pd.read_csv(csv_path)
    # Timestamp columns must be real datetimes before mining.
    frame = dataframe_utils.convert_timestamp_columns_in_df(frame)
    discovered = role_mining.apply(frame)
def test_role_running_xes(self):
    """Smoke test: role mining on the running-example XES log runs without errors."""
    log_path = os.path.join("..", "tests", "input_data", "running-example.xes")
    imported_log = xes_importer.apply(log_path)
    discovered = role_mining.apply(imported_log)