def post(self, request, pk):
    """
    Run a Script identified as "<module>.<script>" and return the pending
    JobResult as the result.
    """
    script = self._get_script(pk)()
    input_serializer = serializers.ScriptInputSerializer(data=request.data)

    # Refuse to enqueue anything when no RQ worker is available to pick it up
    if not Worker.count(get_connection('default')):
        raise RQWorkerNotRunningException()

    # Reject malformed input before creating any job
    if not input_serializer.is_valid():
        return Response(input_serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    validated = input_serializer.data
    script_content_type = ContentType.objects.get(app_label='extras', model='script')
    job_result = JobResult.enqueue_job(
        run_script,
        script.full_name,
        script_content_type,
        request.user,
        data=validated['data'],
        request=copy_safe_request(request),
        commit=validated['commit'],
    )
    script.result = job_result

    serializer = serializers.ScriptDetailSerializer(script, context={'request': request})
    return Response(serializer.data)
def process(self, router, as_task=False, no_commit_check=False):
    """
    Deploy the configuration of a single router, either inline or by queuing
    a background job when `as_task` is set.
    """
    self.stdout.write(f"  - {router.hostname} ... ", ending="")

    if as_task:
        # Hand the whole deployment off to a background job and report its ID
        job = JobResult.enqueue_job(
            set_napalm_configuration,
            "commands.configure_routers",
            Router,
            None,
            router,
            True,
        )
        self.stdout.write(self.style.SUCCESS(f"task #{job.id}"))
        return

    configuration = router.generate_configuration()
    # First pass commits directly when the check step is skipped, otherwise
    # it is a dry-run used only to detect pending changes
    error, changes = router.set_napalm_configuration(configuration, commit=no_commit_check)
    if not no_commit_check and not error and changes:
        # Dry-run found changes and no error, now perform the real commit
        error, _ = router.set_napalm_configuration(configuration, commit=True)

    if error:
        self.stdout.write(self.style.ERROR("failed"))
    else:
        self.stdout.write(self.style.SUCCESS("success"))
def run(self, request, pk):
    """
    Run a Report identified as "<module>.<script>" and return the pending
    JobResult as the result.
    """
    # Check that the user has permission to run reports.
    if not request.user.has_perm('extras.run_script'):
        raise PermissionDenied("This user does not have permission to run reports.")

    # Check that at least one RQ worker is running
    if not Worker.count(get_connection('default')):
        raise RQWorkerNotRunningException()

    # Retrieve and run the Report. This will create a new JobResult.
    report = self._retrieve_report(pk)
    report_content_type = ContentType.objects.get(app_label='extras', model='report')
    report.result = JobResult.enqueue_job(
        run_report, report.full_name, report_content_type, request.user
    )

    serializer = serializers.ReportDetailSerializer(report, context={'request': request})
    return Response(serializer.data)
def configure(self, request):
    """
    Queue one configuration deployment job per requested router and return
    the serialized job results.
    """
    # Check user permission first
    if not request.user.has_perm("peering.deploy_router_configuration"):
        return Response(None, status=status.HTTP_403_FORBIDDEN)

    if request.method == "GET":
        router_ids = request.query_params.getlist("routers[]")
    else:
        router_ids = request.data.get("routers[]", [])

    # No router IDs, nothing to configure
    if len(router_ids) < 1:
        raise ServiceUnavailable("No routers to configure.")

    # Safe HTTP methods only preview the changes, unsafe ones commit them
    commit = request.method not in SAFE_METHODS

    job_results = [
        JobResult.enqueue_job(
            set_napalm_configuration,
            "peering.router.set_napalm_configuration",
            Router,
            request.user,
            router,
            commit,
        )
        for router in Router.objects.filter(pk__in=router_ids)
    ]

    serializer = get_serializer_for_model(JobResult)
    return Response(
        serializer(job_results, many=True, context={"request": request}).data,
        status=status.HTTP_202_ACCEPTED,
    )
def poll_bgp_sessions(self, request, pk=None):
    """
    Queue one BGP session polling job per router attached to this object.
    """
    # The user must be allowed to change both kinds of peering sessions
    allowed = request.user.has_perm(
        "peering.change_directpeeringsession"
    ) and request.user.has_perm("peering.change_internetexchangepeeringsession")
    if not allowed:
        return Response(status=status.HTTP_403_FORBIDDEN)

    job_results = [
        JobResult.enqueue_job(
            poll_bgp_sessions,
            "peering.router.poll_bgp_sessions",
            Router,
            request.user,
            router,
        )
        for router in self.get_object().get_routers()
    ]

    return Response(
        data=[
            JobResultSerializer(instance=job, context={"request": request}).data
            for job in job_results
        ],
        status=status.HTTP_202_ACCEPTED,
    )
def update_local(self, request):
    """
    Queue a PeeringDB synchronization job and return its serialized JobResult.
    """
    job_result = JobResult.enqueue_job(
        synchronize, "peeringdb.synchronize", Synchronization, request.user
    )
    serializer_class = get_serializer_for_model(JobResult)
    serialized = serializer_class(instance=job_result, context={"request": request})
    return Response(serialized.data, status=status.HTTP_202_ACCEPTED)
def handle(self, *args, **options):
    """
    Run the reports selected in options['reports'] (matched by module name or
    full report name), wait for each queued JobResult to reach a terminal
    state, and print a per-test summary with timing.
    """
    # Gather all available reports
    reports = get_reports()

    # Run reports
    for module_name, report_list in reports:
        for report in report_list:
            if module_name in options['reports'] or report.full_name in options['reports']:

                # Run the report and create a new JobResult
                self.stdout.write(
                    "[{:%H:%M:%S}] Running {}...".format(timezone.now(), report.full_name)
                )
                report_content_type = ContentType.objects.get(app_label='extras', model='report')
                job_result = JobResult.enqueue_job(
                    run_report,
                    report.full_name,
                    report_content_type,
                    None
                )

                # Wait on the job to finish, re-fetching to observe worker updates
                while job_result.status not in JobResultStatusChoices.TERMINAL_STATE_CHOICES:
                    time.sleep(1)
                    job_result = JobResult.objects.get(pk=job_result.pk)

                # Report on success/failure
                if job_result.status == JobResultStatusChoices.STATUS_FAILED:
                    status = self.style.ERROR('FAILED')
                # Fixed: compare the job's status attribute (the original compared
                # the JobResult object itself to the choice constant, making the
                # ERRORED branch unreachable)
                elif job_result.status == JobResultStatusChoices.STATUS_ERRORED:
                    status = self.style.ERROR('ERRORED')
                else:
                    status = self.style.SUCCESS('SUCCESS')

                for test_name, attrs in job_result.data.items():
                    self.stdout.write(
                        "\t{}: {} success, {} info, {} warning, {} failure".format(
                            test_name, attrs['success'], attrs['info'], attrs['warning'], attrs['failure']
                        )
                    )
                self.stdout.write(
                    "[{:%H:%M:%S}] {}: {}".format(timezone.now(), report.full_name, status)
                )
                self.stdout.write(
                    "[{:%H:%M:%S}] {}: Duration {}".format(timezone.now(), report.full_name, job_result.duration)
                )

    # Wrap things up
    self.stdout.write(
        "[{:%H:%M:%S}] Finished".format(timezone.now())
    )
def test_napalm_connection(self, request, pk=None):
    """
    Queue a job testing the NAPALM connectivity of this router.
    """
    result = JobResult.enqueue_job(
        test_napalm_connection,
        "peering.router.test_napalm_connection",
        Router,
        request.user,
        self.get_object(),
    )
    payload = JobResultSerializer(instance=result, context={"request": request}).data
    return Response(payload, status=status.HTTP_202_ACCEPTED)
def process(self, router, as_task=False):
    """
    Poll the BGP sessions of a single router, inline or as a background job
    when `as_task` is set.
    """
    self.stdout.write(f"  - {router.hostname} ... ", ending="")

    if as_task:
        # Queue the polling as a background job and report its ID
        job = JobResult.enqueue_job(
            poll_bgp_sessions,
            "commands.poll_bgp_sessions",
            Router,
            None,
            router,
        )
        self.stdout.write(self.style.SUCCESS(f"task #{job.id}"))
        return

    if router.poll_bgp_sessions():
        self.stdout.write(self.style.SUCCESS("success"))
    else:
        self.stdout.write(self.style.ERROR("failed"))
def configuration(self, request, pk=None):
    """
    Queue a job generating the configuration of this router.
    """
    # Check user permission first
    if not request.user.has_perm("peering.view_router_configuration"):
        return Response(status=status.HTTP_403_FORBIDDEN)

    result = JobResult.enqueue_job(
        generate_configuration,
        "peering.router.generate_configuration",
        Router,
        request.user,
        self.get_object(),
    )
    payload = JobResultSerializer(instance=result, context={"request": request}).data
    return Response(payload, status=status.HTTP_202_ACCEPTED)
def import_sessions(self, request, pk=None):
    """
    Queue a job importing the peering sessions of this Internet exchange.
    """
    # Only users allowed to add IX peering sessions may trigger the import
    if not request.user.has_perm("peering.add_internetexchangepeeringsession"):
        return Response(status=status.HTTP_403_FORBIDDEN)

    result = JobResult.enqueue_job(
        import_sessions_to_internet_exchange,
        "peering.internet_exchange.import_sessions",
        InternetExchange,
        request.user,
        self.get_object(),
    )
    payload = JobResultSerializer(instance=result, context={"request": request}).data
    return Response(data=payload, status=status.HTTP_202_ACCEPTED)
def poll_sessions(self, request, pk=None):
    """
    Queue a job polling the peering sessions of this BGP group.
    """
    # Check user permission first
    if not request.user.has_perm("peering.change_directpeeringsession"):
        return Response(status=status.HTTP_403_FORBIDDEN)

    result = JobResult.enqueue_job(
        poll_peering_sessions,
        "peering.bgpgroup.poll_peering_sessions",
        BGPGroup,
        request.user,
        self.get_object(),
    )
    payload = JobResultSerializer(instance=result, context={"request": request}).data
    return Response(data=payload, status=status.HTTP_202_ACCEPTED)
def configure(self, request):
    """
    Validate the request payload, then queue a configuration deployment job
    for each requested router and return the serialized job results.
    """
    # Check user permission first
    if not request.user.has_perm("peering.deploy_router_configuration"):
        return Response(None, status=status.HTTP_403_FORBIDDEN)

    # Make sure request is valid
    serializer = RouterConfigureSerializer(data=request.data)
    serializer.is_valid(raise_exception=True)

    router_ids = serializer.validated_data.get("routers")
    if len(router_ids) < 1:
        raise ValidationError("routers list must not be empty")
    commit = serializer.validated_data.get("commit")

    routers = Router.objects.filter(pk__in=router_ids)
    if not routers:
        return Response(status=status.HTTP_404_NOT_FOUND)

    job_results = [
        JobResult.enqueue_job(
            set_napalm_configuration,
            "peering.router.set_napalm_configuration",
            Router,
            request.user,
            router,
            commit,
        )
        for router in routers
    ]

    return Response(
        JobResultSerializer(job_results, many=True, context={"request": request}).data,
        status=status.HTTP_202_ACCEPTED,
    )
def handle(self, *args, **options):
    """
    Synchronize the local PeeringDB cache: flush it, queue the work as a
    background task, or perform the synchronization inline.
    """
    api = PeeringDB()

    if options["flush"]:
        self.stdout.write("[*] Removing cached data")
        api.clear_local_database()
        return

    if options["tasks"]:
        # Defer the synchronization to a background job
        job = JobResult.enqueue_job(
            synchronize, "peeringdb.synchronize", Synchronization, None
        )
        self.stdout.write(self.style.SUCCESS(f"task #{job.id}"))
        return

    self.stdout.write("[*] Caching data locally")
    api.update_local_database(api.get_last_sync_time())

    self.stdout.write("[*] Updating AS details")
    for autonomous_system in AutonomousSystem.objects.defer("prefixes"):
        autonomous_system.synchronize_with_peeringdb()
        if options["verbosity"] >= 2:
            self.stdout.write(f"  - Synchronized AS{autonomous_system.asn}")