def list_offsite(request):
    procedures = Procedure.with_job_tasks('Offsite')
    last_jobs = Procedure.jobs_with_job_tasks('Offsite')
    extra_content = {'procedures': procedures,
                     'last_jobs': last_jobs,
                     'title': u"Procedimentos com offsite ativo"}
    return render_to_response(request, "procedures_list.html", extra_content)
def procedure(self):
    from nimbus.procedures.models import Procedure
    # Job names are prefixed with the procedure uuid hex: "<uuid_hex>_<name>".
    procedure_name = self.name.split('_')[0]
    if self.type == "R":
        # This job has no procedure defined because it is a restore.
        from nimbus.computers.models import Computer
        client_name = self.client.name.split('_')[0]
        fake_procedure = Procedure()
        fake_procedure.computer = Computer.objects.get(uuid__uuid_hex=client_name)
        return fake_procedure
    if not hasattr(self, '_procedure'):
        try:
            self._procedure = Procedure.objects.select_related().get(uuid__uuid_hex=procedure_name)
        except Procedure.DoesNotExist:
            self._procedure = None
    return self._procedure
def get_tree_search_file(request):
    pattern = request.POST['pattern']
    job_id = request.POST['job_id']
    files = Procedure.search_files(job_id, pattern)
    response = simplejson.dumps(files)
    return HttpResponse(response, mimetype="text/plain")
def get_tree(request):
    path = request.POST['path']
    job_id = request.POST['job_id']
    computer_id = request.POST['computer_id']
    computer = Computer.objects.get(id=computer_id)
    files = Procedure.list_files(job_id, computer, path)
    response = simplejson.dumps(files)
    return HttpResponse(response, mimetype="text/plain")
def get_tree(request):
    path = request.POST['path']
    job_id = request.POST['job_id']
    computer_id = request.POST['computer_id']
    computer = Computer.objects.get(id=computer_id)
    files = Procedure.list_files(job_id, path, computer)
    # test that forces the file list to be returned
    #files = ["/home/lucas/arquivo1.txt", "/home/lucas/arquivo2.txt"]
    response = simplejson.dumps(files)
    return HttpResponse(response, mimetype="text/plain")
def get_tree(request):
    path = request.POST['path']
    job_id = request.POST['job_id']
    computer_id = request.POST['computer_id']
    computer = Computer.objects.get(id=computer_id)
    files = Procedure.list_files(job_id, computer, path)
    # test that forces the file list to be returned
    #files = ["/home/lucas/arquivo1.txt", "/home/lucas/arquivo2.txt"]
    response = simplejson.dumps(files)
    return HttpResponse(response, mimetype="text/plain")
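# Usage sketch, not part of the original code: one way to exercise get_tree through
# Django's test client. The URL "/procedures/get_tree/" and the posted ids are
# assumptions for illustration only; the view itself just reads 'path', 'job_id'
# and 'computer_id' from request.POST and returns the file list JSON-encoded as
# text/plain.
def _example_get_tree_call():
    import json
    from django.test.client import Client

    client = Client()
    response = client.post('/procedures/get_tree/',
                           {'path': '/', 'job_id': '1', 'computer_id': '1'})
    # the response body is the simplejson.dumps() output produced by the view
    return json.loads(response.content)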
def home(request):
    job_bytes = Job.get_bytes_from_last_jobs()
    grafico_backup_dados = {
        'title': u"Quantidade de dados realizados backup",
        'width': "100%",
        'type': "bar",
        'cid': "chart1",
        'header': [d.strftime("%d/%m/%y") for d in sorted(job_bytes)],
        'labels': [utils.filesizeformat(v) for k, v in sorted(job_bytes.items())],
        'lines': {"Dados": utils.ordered_dict_value_to_formatted_float(job_bytes)}
    }
    job_files = Job.get_files_from_last_jobs()
    grafico_backup_arquivos = {
        'title': u"Quantidade de arquivos realizados backup",
        'width': "100%",
        'type': "bar",
        'cid': "chart2",
        'header': [d.strftime("%d/%m/%y") for d in sorted(job_files)],
        'labels': [int(v) for k, v in sorted(job_files.items())],
        'lines': {"Arquivos": [int(v) for k, v in sorted(job_files.items())]}
    }
    graphics = Graphics()
    blocks = graphics.render_blocks()
    # usage values are percentages, so free = 100 - used
    memory = systeminfo.get_memory_usage()
    memory_free = 100 - memory
    grafico_uso_memoria = {
        'title': u"Uso da memória",
        'width': "90%",
        'type': "pie",
        'cid': "chart4",
        'header': ["Gigabytes"],
        'lines': {"Disponível": [memory_free], "Ocupado": [memory]}
    }
    cpu = systeminfo.get_cpu_usage()
    cpu_free = 100 - cpu
    grafico_uso_cpu = {
        'title': u"Uso da CPU",
        'width': "",
        'type': "pie",
        'cid': "chart5",
        'header': ["Clocks"],
        'lines': {"Disponível": [cpu_free], "Ocupado": [cpu]}
    }
    # Content data:
    # - type
    # - label
    # - date
    # - message
    last_jobs = Procedure.all_non_self_jobs()[:5]
    return render_to_response(request, "home.html", locals())
def history(request, object_id=False):
    #TODO: filter jobs of a specific procedure
    title = u'Histórico de Procedimentos'
    # get page number
    try:
        page = int(request.GET.get('page', '1'))
    except ValueError:
        page = 1
    # get all jobs
    all_jobs = Procedure.all_non_self_jobs()
    paginator = Paginator(all_jobs, 20)
    try:
        jobs = paginator.page(page)
    except (EmptyPage, InvalidPage):
        jobs = paginator.page(paginator.num_pages)
    last_jobs = jobs.object_list
    return render_to_response(request, "procedures_history.html", locals())
def history(request, object_id=False):
    #TODO: filter jobs of a specific procedure
    title = u'Histórico de Procedimentos'
    # get page number
    try:
        page = int(request.GET.get('page', '1'))
    except ValueError:
        page = 1
    # get all jobs
    all_jobs = Procedure.all_jobs()
    paginator = Paginator(all_jobs, 20)
    try:
        jobs = paginator.page(page)
    except (EmptyPage, InvalidPage):
        jobs = paginator.page(paginator.num_pages)
    last_jobs = jobs.object_list
    return render_to_response(request, "procedures_history.html", locals())
def home(request):
    job_bytes = Job.get_bytes_from_last_jobs()
    table1 = {
        'title': u"Quantidade de dados realizados backup",
        'width': "100%",
        'type': "bar",
        'cid': "chart1",
        'header': [d.strftime("%d/%m/%y") for d in sorted(job_bytes)],
        'labels': [utils.filesizeformat(v) for k, v in sorted(job_bytes.items())],
        'lines': {"Dados": utils.ordered_dict_value_to_formatted_float(job_bytes)}
    }
    job_files = Job.get_files_from_last_jobs()
    table2 = {
        'title': u"Quantidade de arquivos realizados backup",
        'width': "100%",
        'type': "bar",
        'cid': "chart2",
        'header': [d.strftime("%d/%m/%y") for d in sorted(job_files)],
        'labels': [int(v) for k, v in sorted(job_files.items())],
        'lines': {"Arquivos": [int(v) for k, v in sorted(job_files.items())]}
    }
    graphsdata.update_disk_graph()
    graph_data_manager = graphsdata.GraphDataManager()
    diskdata = graph_data_manager.list_disk_measures()
    table3 = {
        'title': u"Ocupação do disco (GB)",
        'width': "",
        'type': "area",
        'cid': "chart3",
        'height': "200",
        'header': [i[0] for i in diskdata],
        'labels': [utils.filesizeformat(i[1], "GB") for i in diskdata]
    }
    #table3['header'] = ["Gigabytes"]
    # set a default value when there are no disk measures yet
    t3data = [utils.filesizeformat(i[1], "GB") for i in diskdata] if len(diskdata) else [0.0]
    table3['lines'] = {"Disponível": t3data}
    # usage values are percentages, so free = 100 - used
    memory = systeminfo.get_memory_usage()
    memory_free = 100 - memory
    table4 = {
        'title': u"Uso da memória",
        'width': "90%",
        'type': "pie",
        'cid': "chart4",
        'header': ["Gigabytes"],
        'lines': {"Disponível": [memory_free], "Ocupado": [memory]}
    }
    cpu = systeminfo.get_cpu_usage()
    cpu_free = 100 - cpu
    table5 = {
        'title': u"Uso da CPU",
        'width': "",
        'type': "pie",
        'cid': "chart5",
        'header': ["Clocks"],
        'lines': {"Disponível": [cpu_free], "Ocupado": [cpu]}
    }
    # Content data:
    # - type
    # - label
    # - date
    # - message
    last_jobs = Procedure.all_non_self_jobs()[:5]
    return render_to_response(request, "home.html", locals())
def cancel_job(request, job_id):
    if request.method == "POST":
        Procedure.cancel_jobid(job_id)
        messages.success(request, "Procedimento cancelado com sucesso")
    return redirect('/procedures/list')
def list_all(request):
    procedures = Procedure.objects.filter(id__gt=1)
    title = u"Procedimentos de backup"
    last_jobs = Procedure.all_non_self_jobs()[:10]
    return render_to_response(request, "procedures_list.html", locals())
def list_all(request):
    procedures = Procedure.objects.filter(id__gt=1)
    title = u"Procedimentos de backup"
    last_jobs = Procedure.all_jobs()[:10]
    return render_to_response(request, "procedures_list.html", locals())
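# URL wiring sketch, an assumption rather than the project's actual urls.py: in the
# Django 1.x style this codebase uses, the views above could be exposed roughly as
# below. The module path 'nimbus.procedures.views', the URL patterns and the names
# are illustrative only.
from django.conf.urls.defaults import patterns, url

urlpatterns = patterns('nimbus.procedures.views',
    url(r'^procedures/list/$', 'list_all', name='procedures_list'),
    url(r'^procedures/offsite/$', 'list_offsite', name='procedures_offsite'),
    url(r'^procedures/history/$', 'history', name='procedures_history'),
    url(r'^procedures/cancel/(?P<job_id>\d+)/$', 'cancel_job', name='procedures_cancel_job'),
    url(r'^procedures/get_tree/$', 'get_tree', name='procedures_get_tree'),
)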