def tasks(request):
    # Two independently paginated tabs: current tasks ("page") and task history ("page2").
    page = request.GET.get('page')
    page2 = request.GET.get('page2')
    webpage = 'currenttask'
    if page is None:
        page = 1
    else:
        webpage = 'currenttask'
    if page2 is None:
        page2 = 1
    else:
        webpage = 'historytask'

    current_tasks = Task.objects.filter(
        status__in=['running', 'waitting', 'error', 'pausing'])
    history_tasks = Task.objects.filter(status='stopping')
    p = paging(current_tasks, page, 10)
    p2 = paging(history_tasks, page2, 10)

    # Per-status counters shown in the page header.
    running_count = Task.objects.filter(status='running').count()
    pausing_count = Task.objects.filter(status='pausing').count()
    stopping_count = Task.objects.filter(status='stopping').count()
    error_count = Task.objects.filter(status='error').count()

    return render(
        request, 'crawlermanage/tasks.html', {
            'p': p,
            'p2': p2,
            'status': webpage,
            'running_count': running_count,
            'pausing_count': pausing_count,
            'stopping_count': stopping_count,
            'error_count': error_count
        })

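# --- Hypothetical sketch (assumption, not this project's actual model): the
# --- status values filtered above imply a Task model roughly like the one
# --- below. Field names and choices (including the 'waitting' spelling) are
# --- inferred from the view code and kept here only for illustration.
#
# class Task(models.Model):
#     STATUS_CHOICES = [
#         ('running', 'running'),
#         ('waitting', 'waitting'),
#         ('pausing', 'pausing'),
#         ('stopping', 'stopping'),
#         ('error', 'error'),
#     ]
#     taskname = models.CharField(max_length=255)
#     status = models.CharField(max_length=20, choices=STATUS_CHOICES)
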
def machinelist(request):
    page = request.GET.get('page')
    if page is None:
        page = 1
    machines = Machine.objects.all()
    p = paging(machines, page, 10)
    return render(request, 'crawlermanage/machine_list.html', {'p': p})

def blogdata(request):
    taskid = request.GET.get('taskid')
    # Name kept from the original because locals() is passed as the template
    # context, so the template may rely on this variable name.
    list = []
    if taskid is not None:
        list = Blog.objects.filter(taskid=taskid)
    page = request.GET.get('page')
    if page is None:
        page = 1
    p = paging(list, page, 10)
    return render(request, 'crawlermanage/blogdata.html', locals())

def ecommercedata(request):
    taskid = request.GET.get('taskid')
    # Default to empty querysets so paging() below never sees undefined names
    # when no taskid is supplied.
    goodslist = Goods.objects.none()
    shoplist = Stores.objects.none()
    if taskid is not None:
        goodslist = Goods.objects.filter(taskid=taskid)
        shoplist = Stores.objects.filter(taskid=taskid)

    # Two independently paginated tabs: goods ("page") and stores ("page2").
    page = request.GET.get('page')
    page2 = request.GET.get('page2')
    webpage = 'goods'
    if page is None:
        page = 1
    else:
        webpage = 'goods'
    if page2 is None:
        page2 = 1
    else:
        webpage = 'stores'

    p = paging(goodslist, page, 10)
    p2 = paging(shoplist, page2, 10)
    return render(request, 'crawlermanage/ecommercedata.html', {
        'p': p,
        'p2': p2,
        'status': webpage
    })

def processlist(request):
    page = request.GET.get('page')
    if page is None:
        page = 1
    processes = Process.objects.filter(status__in=['running', 'pausing'])
    p = paging(processes, page, 10)

    # Look up the owning task's name for every process on the current page.
    tasknames = []
    for i in p.p_content:
        logger.info(i.taskid)
        # Model instances are not subscriptable; use attribute access.
        name = Task.objects.get(id=i.taskid).taskname
        tasknames.append(name)
    pc_name = zip(p.p_content, tasknames)

    return render(request, 'crawlermanage/process_list.html', {
        'p': p,
        'pc_name': pc_name
    })

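# --- Hypothetical sketch (assumption, not the project's actual helper): the
# --- paging(queryset, page, size) calls above imply a thin wrapper around
# --- Django's Paginator whose result exposes the current page's rows as
# --- .p_content, the only attribute these views rely on. The real helper is
# --- defined elsewhere in the project.
#
# from django.core.paginator import EmptyPage, PageNotAnInteger, Paginator
#
# class PageResult:
#     def __init__(self, page_obj):
#         self.p_content = page_obj.object_list  # rows shown on this page
#         self.page_obj = page_obj               # kept for pagination links
#
# def paging(queryset, page, size):
#     paginator = Paginator(queryset, size)
#     try:
#         page_obj = paginator.page(page)
#     except (PageNotAnInteger, EmptyPage):
#         page_obj = paginator.page(1)
#     return PageResult(page_obj)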