def test_ip_address_vars(self):
    """
    Test that IPAddress variable fields behave as expected.

    This test case exercises the following types for both IPv4 and IPv6:
    - IPAddressVar
    - IPAddressWithMaskVar
    - IPNetworkVar
    """
    with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR, "extras/tests/dummy_jobs")):
        job_class = get_job("local/test_ipaddress_vars/TestIPAddresses")

        # Fill out the form with one value for each supported var type.
        form_data = {
            "ipv4_address": "1.2.3.4",
            "ipv4_with_mask": "1.2.3.4/32",
            "ipv4_network": "1.2.3.0/24",
            "ipv6_address": "2001:db8::1",
            "ipv6_with_mask": "2001:db8::1/64",
            "ipv6_network": "2001:db8::/64",
        }
        form = job_class().as_form(form_data)
        self.assertTrue(form.is_valid())

        # Prepare the job data
        job_result = JobResult.objects.create(
            name=job_class.class_path,
            obj_type=self.job_content_type,
            user=None,
            job_id=uuid.uuid4(),
        )
        serialized = job_class.serialize_data(form.cleaned_data)

        # Run the job and extract the job payload data.
        # Changing commit=True as commit=False will rollback database changes including the
        # logs that we are trying to read. See above note on why we are using the default database.
        # Also need to pass a mock request object as execute_webhooks will be called with the creation
        # of the objects.
        run_job(data=serialized, request=self.request, commit=True, job_result_pk=job_result.pk)
        job_result.refresh_from_db()

        log_info = JobLogEntry.objects.filter(
            job_result=job_result, log_level=LogLevelChoices.LOG_INFO, grouping="run"
        ).first()
        job_result_data = json.loads(log_info.log_object)

        # Assert stuff
        self.assertEqual(job_result.status, JobResultStatusChoices.STATUS_COMPLETED)
        self.assertEqual(form_data, job_result_data)
def test_job_data_as_string(self):
    """
    Test that a job errors gracefully when `data` is not a dictionary.

    Passing a plain string as the job payload must leave the JobResult in
    STATUS_ERRORED and record a failure log entry during initialization.
    """
    with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR, "extras/tests/dummy_jobs")):
        module = "test_object_vars"
        name = "TestObjectVars"
        job_class = get_job(f"local/{module}/{name}")

        # Prepare the job data
        job_result = JobResult.objects.create(
            name=job_class.class_path,
            obj_type=self.job_content_type,
            user=None,
            job_id=uuid.uuid4(),
        )
        data = "BAD DATA STRING"

        run_job(data=data, request=None, commit=False, job_result_pk=job_result.pk)
        job_result.refresh_from_db()

        # Assert stuff
        self.assertEqual(job_result.status, JobResultStatusChoices.STATUS_ERRORED)
        # Scope the log lookup to this job's result; the original unscoped query
        # could match a failure entry created by a different test or job run.
        log_failure = JobLogEntry.objects.filter(
            job_result=job_result, grouping="initialization", log_level=LogLevelChoices.LOG_FAILURE
        ).first()
        self.assertIn("Data should be a dictionary", log_failure.message)
def test_run_job_fail(self):
    """Test that file upload succeeds; job FAILS; files deleted."""
    with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR, "extras/tests/dummy_jobs")):
        job_name = "local/test_file_upload_fail/TestFileUploadFail"
        job_class = get_job(job_name)

        job_result = JobResult.objects.create(
            name=job_class.class_path,
            obj_type=self.job_content_type,
            user=None,
            job_id=uuid.uuid4(),
        )

        # Serialize the file to FileProxy
        upload = {"file": self.dummy_file}
        form = job_class().as_form(files=upload)
        self.assertTrue(form.is_valid())
        serialized_data = job_class.serialize_data(form.cleaned_data)

        # Assert that the file was serialized to a FileProxy
        self.assertTrue(isinstance(serialized_data["file"], uuid.UUID))
        self.assertEqual(serialized_data["file"], FileProxy.objects.latest().pk)
        self.assertEqual(FileProxy.objects.count(), 1)

        # Run the job
        run_job(data=serialized_data, request=None, commit=False, job_result_pk=job_result.pk)
        job_result.refresh_from_db()

        # Can't check log objects when jobs are reverted (within tests anyways.)
        # This is due to the fake job_logs db not being available for tests.

        # Assert that FileProxy was cleaned up
        self.assertEqual(FileProxy.objects.count(), 0)
def test_optional_object_var(self):
    """
    Test that an optional Object variable field behaves as expected.
    """
    with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR, "extras/tests/dummy_jobs")):
        job_class = get_job("local/test_object_var_optional/TestOptionalObjectVar")

        # Prepare the job data
        job_result = JobResult.objects.create(
            name=job_class.class_path,
            obj_type=self.job_content_type,
            user=None,
            job_id=uuid.uuid4(),
        )

        # Run the job without the optional var provided
        run_job(data={"region": None}, request=self.request, commit=True, job_result_pk=job_result.pk)
        job_result.refresh_from_db()

        info_log = JobLogEntry.objects.filter(
            job_result=job_result, log_level=LogLevelChoices.LOG_INFO, grouping="run"
        ).first()

        # Assert stuff
        self.assertEqual(job_result.status, JobResultStatusChoices.STATUS_COMPLETED)
        self.assertEqual(info_log.log_object, None)
        self.assertEqual(info_log.message, "The Region if any that the user provided.")
        self.assertEqual(job_result.data["output"], "\nNice Region (or not)!")
def test_required_object_var(self):
    """
    Test that a required Object variable field behaves as expected.

    Omitting the required `region` value must leave the JobResult in
    STATUS_ERRORED and record a failure log entry during initialization.
    """
    with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR, "extras/tests/dummy_jobs")):
        module = "test_object_var_required"
        name = "TestRequiredObjectVar"
        job_class = get_job(f"local/{module}/{name}")

        # Prepare the job data
        job_result = JobResult.objects.create(
            name=job_class.class_path,
            obj_type=self.job_content_type,
            user=None,
            job_id=uuid.uuid4(),
        )
        data = {"region": None}

        run_job(data=data, request=None, commit=False, job_result_pk=job_result.pk)
        job_result.refresh_from_db()

        # Assert stuff
        self.assertEqual(job_result.status, JobResultStatusChoices.STATUS_ERRORED)
        # Scope the log lookup to this job's result; the original unscoped query
        # could match a failure entry created by a different test or job run.
        log_failure = JobLogEntry.objects.filter(
            job_result=job_result, grouping="initialization", log_level=LogLevelChoices.LOG_FAILURE
        ).first()
        self.assertIn("region is a required field", log_failure.message)
def test_jobs_registration(self):
    """
    Check that plugin jobs are registered correctly and discoverable.
    """
    from dummy_plugin.jobs import DummyJob

    # The job class must be present in the plugin_jobs registry...
    self.assertIn(DummyJob, registry.get("plugin_jobs", []))
    # ...resolvable by class_path...
    self.assertEqual(DummyJob, get_job("plugins/dummy_plugin.jobs/DummyJob"))
    self.assertIn("plugins/dummy_plugin.jobs/DummyJob", get_job_classpaths())

    # ...and reachable through the nested get_jobs() dictionary.
    jobs_dict = get_jobs()
    self.assertIn("plugins", jobs_dict)
    self.assertIn("dummy_plugin.jobs", jobs_dict["plugins"])
    module_entry = jobs_dict["plugins"]["dummy_plugin.jobs"]
    self.assertEqual("DummyPlugin jobs", module_entry.get("name"))
    self.assertIn("jobs", module_entry)
    self.assertIn("DummyJob", module_entry["jobs"])
    self.assertEqual(DummyJob, module_entry["jobs"]["DummyJob"])
def dry_run(self, request, pk):
    """
    Immediately enqueue a dry-run (commit=False) of the given ScheduledJob.

    Requires the `extras.run_job` permission; returns the serialized JobResult.
    Raises Http404 when the scheduled job's class can no longer be resolved.
    """
    if not request.user.has_perm("extras.run_job"):
        raise PermissionDenied()

    scheduled_job = get_object_or_404(ScheduledJob, pk=pk)
    job_class = get_job(scheduled_job.job_class)
    if job_class is None:
        raise Http404

    # Original code instantiated the job and unpacked class_path into unused
    # locals; class_path is available directly on the class, so both removed.
    job_content_type = ContentType.objects.get(app_label="extras", model="job")
    job_result = JobResult.enqueue_job(
        run_job,
        job_class.class_path,
        job_content_type,
        scheduled_job.user,
        data=scheduled_job.kwargs["data"],
        request=copy_safe_request(request),
        commit=False,  # force a dry-run
    )

    serializer = serializers.JobResultSerializer(job_result, context={"request": request})
    return Response(serializer.data)
def test_read_only_job_fail(self):
    """
    Job read only test with fail result.
    """
    with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR, "extras/tests/dummy_jobs")):
        job_class = get_job("local/test_read_only_fail/TestReadOnlyFail")
        job_content_type = ContentType.objects.get(app_label="extras", model="job")

        job_result = JobResult.objects.create(
            name=job_class.class_path,
            obj_type=job_content_type,
            user=None,
            job_id=uuid.uuid4(),
        )

        run_job(data={}, request=None, commit=False, job_result_pk=job_result.pk)
        job_result.refresh_from_db()

        self.assertEqual(job_result.status, JobResultStatusChoices.STATUS_ERRORED)
        self.assertEqual(Site.objects.count(), 0)  # Ensure DB transaction was aborted
        # Also ensure the standard log message about aborting the transaction is *not* present
        self.assertNotEqual(
            job_result.data["run"]["log"][-1][-1],
            "Database changes have been reverted due to error.",
        )
def test_ready_only_job_pass(self):
    """
    Job read only test with pass result.
    """
    # NOTE(review): method name has a typo ("ready_only" vs "read_only", cf.
    # test_read_only_job_fail); kept as-is to avoid renaming a discovered test.
    with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR, "extras/tests/dummy_jobs")):
        job_class = get_job("local/test_read_only_pass/TestReadOnlyPass")
        job_content_type = ContentType.objects.get(app_label="extras", model="job")

        job_result = JobResult.objects.create(
            name=job_class.class_path,
            obj_type=job_content_type,
            user=None,
            job_id=uuid.uuid4(),
        )

        run_job(data={}, request=None, commit=False, job_result_pk=job_result.pk)
        job_result.refresh_from_db()

        self.assertEqual(job_result.status, JobResultStatusChoices.STATUS_COMPLETED)
        self.assertEqual(Site.objects.count(), 0)  # Ensure DB transaction was aborted
def test_job_fail(self):
    """
    Job test with fail result.
    """
    with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR, "extras/tests/dummy_jobs")):
        job_class = get_job("local/test_fail/TestFail")
        job_content_type = ContentType.objects.get(app_label="extras", model="job")

        job_result = JobResult.objects.create(
            name=job_class.class_path,
            obj_type=job_content_type,
            user=None,
            job_id=uuid.uuid4(),
        )

        run_job(data={}, request=None, commit=False, job_result_pk=job_result.pk)
        job_result.refresh_from_db()

        self.assertEqual(job_result.status, JobResultStatusChoices.STATUS_ERRORED)
def test_field_order(self):
    """
    Job test with field order.
    """
    with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR, "extras/tests/dummy_jobs")):
        job_class = get_job("local/test_field_order/TestFieldOrder")
        form = job_class().as_form()

        # assertHTMLEqual normalizes whitespace, so only the markup matters here.
        self.assertHTMLEqual(
            form.as_table(),
            """<tr><th><label for="id_var2">Var2:</label></th><td>
<input class="form-control form-control" id="id_var2" name="var2" placeholder="None" required type="text">
<br><span class="helptext">Hello</span></td></tr>
<tr><th><label for="id_var23">Var23:</label></th><td>
<input class="form-control form-control" id="id_var23" name="var23" placeholder="None" required type="text">
<br><span class="helptext">I want to be second</span></td></tr>
<tr><th><label for="id__commit">Commit changes:</label></th><td>
<input checked id="id__commit" name="_commit" placeholder="Commit changes" type="checkbox">
<br><span class="helptext">Commit changes to the database (uncheck for a dry-run)</span></td></tr>""",
        )
def test_field_order(self):
    """
    Job test with field order.
    """
    job_class = get_job("local/test_field_order/TestFieldOrder")
    form = job_class().as_form()

    # assertHTMLEqual normalizes whitespace, so only the markup matters here.
    self.assertHTMLEqual(
        form.as_table(),
        """<tr><th><label for="id_var1">Var1:</label></th><td>
<input class="form-control form-control" id="id_var1" name="var1" placeholder="None" required type="file">
<br><span class="helptext">Some file wants to be first</span></td></tr>
<tr><th><label for="id_var2">Var2:</label></th><td>
<input class="form-control form-control" id="id_var2" name="var2" placeholder="None" required type="text">
<br><span class="helptext">Hello</span></td></tr>
<tr><th><label for="id_var23">Var23:</label></th><td>
<input class="form-control form-control" id="id_var23" name="var23" placeholder="None" required type="text">
<br><span class="helptext">I want to be second</span></td></tr>
<tr><th><label for="id__commit">Commit changes:</label></th><td>
<input checked id="id__commit" name="_commit" placeholder="Commit changes" type="checkbox">
<br><span class="helptext">Commit changes to the database (uncheck for a dry-run)</span></td></tr>""",
    )
def test_run(self):
    """Run a job that creates sites with a custom field and verify the results."""
    with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR, "extras/tests/dummy_jobs")):
        self.clear_worker()

        job_content_type = ContentType.objects.get(app_label="extras", model="job")
        job_class = get_job("local/test_site_with_custom_field/TestCreateSiteWithCustomField")

        job_result = JobResult.objects.create(
            name=job_class.class_path,
            obj_type=job_content_type,
            user=None,
            job_id=uuid.uuid4(),
        )

        # Run the job
        run_job(data={}, request=self.request, commit=True, job_result_pk=job_result.pk)
        self.wait_on_active_tasks()
        job_result.refresh_from_db()
        self.assertEqual(job_result.status, JobResultStatusChoices.STATUS_COMPLETED)

        # Test site with a value for custom_field
        site_1 = Site.objects.filter(slug="test-site-one")
        self.assertEqual(site_1.count(), 1)
        self.assertEqual(CustomField.objects.filter(name="cf1").count(), 1)
        self.assertEqual(site_1[0].cf["cf1"], "some-value")

        # Test site with default value for custom field
        site_2 = Site.objects.filter(slug="test-site-two")
        self.assertEqual(site_2.count(), 1)
        self.assertEqual(site_2[0].cf["cf1"], "-")
def test_jobs_registration(self):
    """
    Check that plugin jobs are registered correctly and discoverable.
    """
    from example_plugin.jobs import ExampleJob

    # The job class must be present in the plugin_jobs registry...
    self.assertIn(ExampleJob, registry.get("plugin_jobs", []))
    # ...resolvable by class_path...
    self.assertEqual(ExampleJob, get_job("plugins/example_plugin.jobs/ExampleJob"))
    self.assertIn("plugins/example_plugin.jobs/ExampleJob", get_job_classpaths())

    # ...and reachable through the nested get_jobs() dictionary.
    jobs_dict = get_jobs()
    self.assertIn("plugins", jobs_dict)
    self.assertIn("example_plugin.jobs", jobs_dict["plugins"])
    module_entry = jobs_dict["plugins"]["example_plugin.jobs"]
    self.assertEqual("ExamplePlugin jobs", module_entry.get("name"))
    self.assertIn("jobs", module_entry)
    self.assertIn("ExampleJob", module_entry["jobs"])
    self.assertEqual(ExampleJob, module_entry["jobs"]["ExampleJob"])
def get_job_class_and_model(module, name):
    """Test helper function to look up a job class and job model and ensure the latter is enabled."""
    path = f"local/{module}/{name}"
    job_class = get_job(path)
    job_model = Job.objects.get_for_class_path(path)
    # Jobs are disabled by default; enable so tests can actually run them.
    job_model.enabled = True
    job_model.validated_save()
    return job_class, job_model
def test_related_object(self):
    """Test that the `related_object` property is computed properly."""
    # Case 1: Job, identified by class_path.
    with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR, "extras/tests/dummy_jobs")):
        job_class = get_job("local/test_pass/TestPass")
        job_result = JobResult(
            name=job_class.class_path,
            obj_type=ContentType.objects.get(app_label="extras", model="job"),
            job_id=uuid.uuid4(),
        )

        # Can't just do self.assertEqual(job_result.related_object, job_class) here for some reason
        self.assertEqual(type(job_result.related_object), type)
        self.assertTrue(issubclass(job_result.related_object, Job))
        self.assertEqual(job_result.related_object.class_path, "local/test_pass/TestPass")

        job_result.name = "local/no_such_job/NoSuchJob"
        self.assertIsNone(job_result.related_object)

        job_result.name = "not-a-class-path"
        self.assertIsNone(job_result.related_object)

    # Case 2: GitRepository, identified by name.
    repo = GitRepository(
        name="Test Git Repository",
        slug="test-git-repo",
        remote_url="http://localhost/git.git",
        username="******",
    )
    repo.save(trigger_resync=False)

    job_result = JobResult(
        name=repo.name,
        obj_type=ContentType.objects.get_for_model(repo),
        job_id=uuid.uuid4(),
    )
    self.assertEqual(job_result.related_object, repo)

    job_result.name = "No such GitRepository"
    self.assertIsNone(job_result.related_object)

    # Case 3: Related object with no name, identified by PK/ID
    ip_address = IPAddress.objects.create(address="1.1.1.1/32")
    job_result = JobResult(
        name="irrelevant",
        obj_type=ContentType.objects.get_for_model(ip_address),
        job_id=ip_address.pk,
    )
    self.assertEqual(job_result.related_object, ip_address)

    job_result.job_id = uuid.uuid4()
    self.assertIsNone(job_result.related_object)
def test_ip_address_vars(self):
    """
    Test that IPAddress variable fields behave as expected.

    This test case exercises the following types for both IPv4 and IPv6:
    - IPAddressVar
    - IPAddressWithMaskVar
    - IPNetworkVar
    """
    with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR, "extras/tests/dummy_jobs")):
        job_class = get_job("local/test_ipaddress_vars/TestIPAddresses")

        # Fill out the form with one value for each supported var type.
        form_data = {
            "ipv4_address": "1.2.3.4",
            "ipv4_with_mask": "1.2.3.4/32",
            "ipv4_network": "1.2.3.0/24",
            "ipv6_address": "2001:db8::1",
            "ipv6_with_mask": "2001:db8::1/64",
            "ipv6_network": "2001:db8::/64",
        }
        form = job_class().as_form(form_data)
        self.assertTrue(form.is_valid())

        # Prepare the job data
        job_result = JobResult.objects.create(
            name=job_class.class_path,
            obj_type=self.job_content_type,
            user=None,
            job_id=uuid.uuid4(),
        )
        serialized = job_class.serialize_data(form.cleaned_data)

        # Run the job and extract the job payload data
        run_job(data=serialized, request=None, commit=False, job_result_pk=job_result.pk)
        job_result.refresh_from_db()
        job_payload = job_result.data["run"]["log"][0][2]  # Indexing makes me sad.
        job_result_data = json.loads(job_payload)

        # Assert stuff
        self.assertEqual(job_result.status, JobResultStatusChoices.STATUS_COMPLETED)
        self.assertEqual(form_data, job_result_data)
def test_object_vars(self):
    """
    Test that Object variable fields behave as expected.
    """
    with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR, "extras/tests/dummy_jobs")):
        job_class = get_job("local/test_object_vars/TestObjectVars")
        role = DeviceRole.objects.create(name="role", slug="role")

        # Prepare the job data
        job_result = JobResult.objects.create(
            name=job_class.class_path,
            obj_type=self.job_content_type,
            user=None,
            job_id=uuid.uuid4(),
        )
        data = {
            "role": {"name": "role"},
            "roles": [role.pk],
        }

        # Run the job and extract the job payload data
        # See test_ip_address_vars as to why we are changing commit=True and request=self.request.
        run_job(data=data, request=self.request, commit=True, job_result_pk=job_result.pk)
        job_result.refresh_from_db()

        # Test storing additional data in job
        job_result_data = job_result.data["object_vars"]

        info_log = JobLogEntry.objects.filter(
            job_result=job_result, log_level=LogLevelChoices.LOG_INFO, grouping="run"
        ).first()

        # Assert stuff
        self.assertEqual(job_result.status, JobResultStatusChoices.STATUS_COMPLETED)
        self.assertEqual({"role": str(role.pk), "roles": [str(role.pk)]}, job_result_data)
        self.assertEqual(info_log.log_object, "Role: role")
        self.assertEqual(job_result.data["output"], "\nNice Roles, bro.")
def test_read_only_no_commit_field(self):
    """
    Job read only test commit field is not shown.
    """
    job_class = get_job("local/test_read_only_no_commit_field/TestReadOnlyNoCommitField")
    form = job_class().as_form()

    # assertHTMLEqual normalizes whitespace, so only the markup matters here.
    self.assertHTMLEqual(
        form.as_table(),
        """<tr><th><label for="id_var">Var:</label></th><td>
<input class="form-control form-control" id="id_var" name="var" placeholder="None" required type="text">
<br><span class="helptext">Hello</span><input id="id__commit" name="_commit" type="hidden" value="False"></td></tr>""",
    )
def test_run_job_pass(self):
    """Test that file upload succeeds; job SUCCEEDS; and files are deleted."""
    with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR, "extras/tests/dummy_jobs")):
        job_name = "local/test_file_upload_pass/TestFileUploadPass"
        job_class = get_job(job_name)

        job_result = JobResult.objects.create(
            name=job_class.class_path,
            obj_type=self.job_content_type,
            user=None,
            job_id=uuid.uuid4(),
        )

        # Serialize the file to FileProxy
        upload = {"file": self.dummy_file}
        form = job_class().as_form(files=upload)
        self.assertTrue(form.is_valid())
        serialized_data = job_class.serialize_data(form.cleaned_data)

        # Assert that the file was serialized to a FileProxy
        self.assertTrue(isinstance(serialized_data["file"], uuid.UUID))
        self.assertEqual(serialized_data["file"], FileProxy.objects.latest().pk)
        self.assertEqual(FileProxy.objects.count(), 1)

        # Run the job
        # See test_ip_address_vars as to why we are changing commit=True and request=self.request.
        run_job(data=serialized_data, request=self.request, commit=True, job_result_pk=job_result.pk)
        job_result.refresh_from_db()

        warning_log = JobLogEntry.objects.filter(
            job_result=job_result, log_level=LogLevelChoices.LOG_WARNING, grouping="run"
        ).first()

        # Assert that file contents were correctly read
        self.assertEqual(warning_log.message, f"File contents: {self.file_contents}")  # "File contents: ..."

        # Assert that FileProxy was cleaned up
        self.assertEqual(FileProxy.objects.count(), 0)
def test_run_job_fail(self):
    """Test that file upload succeeds; job FAILS; files deleted."""
    with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR, "extras/tests/dummy_jobs")):
        job_name = "local/test_file_upload_fail/TestFileUploadFail"
        job_class = get_job(job_name)

        job_result = JobResult.objects.create(
            name=job_class.class_path,
            obj_type=self.job_content_type,
            user=None,
            job_id=uuid.uuid4(),
        )

        # Serialize the file to FileProxy
        upload = {"file": self.dummy_file}
        form = job_class().as_form(files=upload)
        self.assertTrue(form.is_valid())
        serialized_data = job_class.serialize_data(form.cleaned_data)

        # Assert that the file was serialized to a FileProxy
        self.assertTrue(isinstance(serialized_data["file"], uuid.UUID))
        self.assertEqual(serialized_data["file"], FileProxy.objects.latest().pk)
        self.assertEqual(FileProxy.objects.count(), 1)

        # Run the job
        run_job(data=serialized_data, request=None, commit=False, job_result_pk=job_result.pk)
        job_result.refresh_from_db()

        # Assert that file contents were correctly read
        self.assertEqual(
            job_result.data["run"]["log"][0][2],
            f"File contents: {self.file_contents}",  # "File contents: ..."
        )
        # Also ensure the standard log message about aborting the transaction is present
        self.assertEqual(
            job_result.data["run"]["log"][-1][-1],
            "Database changes have been reverted due to error.",
        )

        # Assert that FileProxy was cleaned up
        self.assertEqual(FileProxy.objects.count(), 0)
def test_object_vars(self):
    """
    Test that Object variable fields behave as expected.
    """
    with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR, "extras/tests/dummy_jobs")):
        job_class = get_job("local/test_object_vars/TestObjectVars")
        role = DeviceRole.objects.create(name="role", slug="role")

        # Prepare the job data
        job_result = JobResult.objects.create(
            name=job_class.class_path,
            obj_type=self.job_content_type,
            user=None,
            job_id=uuid.uuid4(),
        )
        data = {
            "role": {"name": "role"},
            "roles": [role.pk],
        }

        # Run the job and extract the job payload data
        run_job(data=data, request=None, commit=False, job_result_pk=job_result.pk)
        job_result.refresh_from_db()
        job_payload = job_result.data["run"]["log"][0][2]  # Indexing makes me sad.
        job_result_data = json.loads(job_payload)

        # Assert stuff
        self.assertEqual(job_result.status, JobResultStatusChoices.STATUS_COMPLETED)
        self.assertEqual({"role": str(role.pk), "roles": [str(role.pk)]}, job_result_data)
def related_object(self):
    """Get the related object, if any, identified by the `obj_type`, `name`, and/or `job_id` fields.

    If `obj_type` is extras.Job, then the `name` is used to look up an extras.jobs.Job subclass based on the
    `class_path` of the Job subclass.
    Note that this is **not** the extras.models.Job model class nor an instance thereof.

    Else, if the model class referenced by `obj_type` has a `name` field, our `name` field will be used
    to look up a corresponding model instance. This is used, for example, to look up a related `GitRepository`;
    more generally it can be used by any model that 1) has a unique `name` field and 2) needs to have a many-to-one
    relationship between JobResults and model instances.

    Else, the `obj_type` and `job_id` will be used together as a quasi-GenericForeignKey to look up a model
    instance whose PK corresponds to the `job_id`. This behavior is currently unused in the Nautobot core,
    but may be of use to plugin developers wishing to create JobResults that have a one-to-one relationship
    to plugin model instances.

    This method is potentially rather slow as get_job() may need to actually load the Job class from disk;
    consider carefully whether you actually need to use it.
    """
    from nautobot.extras.jobs import get_job  # needed here to avoid a circular import issue

    if self.obj_type == get_job_content_type():
        # Related object is an extras.Job subclass, our `name` matches its `class_path`
        return get_job(self.name)

    model_class = self.obj_type.model_class()
    if model_class is None:
        return None

    if hasattr(model_class, "name"):
        # See if we have a many-to-one relationship from JobResult to model_class record, based on `name`
        try:
            return model_class.objects.get(name=self.name)
        except model_class.DoesNotExist:
            pass

    # See if we have a one-to-one relationship from JobResult to model_class record based on `job_id`
    try:
        return model_class.objects.get(id=self.job_id)
    except model_class.DoesNotExist:
        pass

    return None
def test_run_job_pass(self):
    """Test that file upload succeeds; job SUCCEEDS; and files are deleted."""
    with self.settings(JOBS_ROOT=os.path.join(settings.BASE_DIR, "extras/tests/dummy_jobs")):
        job_name = "local/test_file_upload_pass/TestFileUploadPass"
        job_class = get_job(job_name)

        job_result = JobResult.objects.create(
            name=job_class.class_path,
            obj_type=self.job_content_type,
            user=None,
            job_id=uuid.uuid4(),
        )

        # Serialize the file to FileProxy
        upload = {"file": self.dummy_file}
        form = job_class().as_form(files=upload)
        self.assertTrue(form.is_valid())
        serialized_data = job_class.serialize_data(form.cleaned_data)

        # Assert that the file was serialized to a FileProxy
        self.assertTrue(isinstance(serialized_data["file"], uuid.UUID))
        self.assertEqual(serialized_data["file"], FileProxy.objects.latest().pk)
        self.assertEqual(FileProxy.objects.count(), 1)

        # Run the job
        run_job(data=serialized_data, request=None, commit=False, job_result_pk=job_result.pk)
        job_result.refresh_from_db()

        # Assert that file contents were correctly read
        self.assertEqual(
            job_result.data["run"]["log"][0][2],
            f"File contents: {self.file_contents}",  # "File contents: ..."
        )

        # Assert that FileProxy was cleaned up
        self.assertEqual(FileProxy.objects.count(), 0)
def handle(self, *args, **kwargs):
    """Add handler for run_config_compliance."""
    # Resolve the plugin-provided job class and delegate execution to job_runner.
    job_class = get_job("plugins/nautobot_golden_config.jobs/ComplianceJob")
    job_runner(self, job_class, kwargs.get("device"), kwargs.get("user"))
def enqueue_job(cls, func, name, obj_type, user, *args, celery_kwargs=None, schedule=None, **kwargs):
    """
    Create a JobResult instance and enqueue a job using the given callable

    func: The callable object to be enqueued for execution
    name: Name for the JobResult instance - corresponds to the desired Job class's "class_path" attribute,
        if obj_type is extras.Job; for other funcs and obj_types it may differ.
    obj_type: ContentType to link to the JobResult instance obj_type
    user: User object to link to the JobResult instance
    celery_kwargs: Dictionary of kwargs to pass as **kwargs to Celery when job is queued
    args: additional args passed to the callable
    schedule: Optional ScheduledJob instance to link to the JobResult
    kwargs: additional kwargs passed to the callable
    """
    job_result = cls.objects.create(name=name, obj_type=obj_type, user=user, job_id=uuid.uuid4(), schedule=schedule)

    kwargs["job_result_pk"] = job_result.pk

    # Prepare kwargs that will be sent to Celery
    if celery_kwargs is None:
        celery_kwargs = {}

    if obj_type.app_label == "extras" and obj_type.model.lower() == "job":
        try:
            job_model = Job.objects.get_for_class_path(name)
            # Only forward positive (explicitly configured) time limits to Celery.
            if job_model.soft_time_limit > 0:
                celery_kwargs["soft_time_limit"] = job_model.soft_time_limit
            if job_model.time_limit > 0:
                celery_kwargs["time_limit"] = job_model.time_limit
            job_result.job_model = job_model
            job_result.save()
        except Job.DoesNotExist:
            # 2.0 TODO: remove this fallback logic, database records should always exist
            from nautobot.extras.jobs import get_job  # needed here to avoid a circular import issue

            job_class = get_job(name)
            if job_class is not None:
                logger.error("No Job instance found in the database corresponding to %s", name)
                if hasattr(job_class.Meta, "soft_time_limit"):
                    celery_kwargs["soft_time_limit"] = job_class.Meta.soft_time_limit
                if hasattr(job_class.Meta, "time_limit"):
                    celery_kwargs["time_limit"] = job_class.Meta.time_limit
            else:
                logger.error("Neither a Job database record nor a Job source class were found for %s", name)

    func.apply_async(args=args, kwargs=kwargs, task_id=str(job_result.job_id), **celery_kwargs)

    return job_result
def handle(self, *args, **kwargs):
    """Add handler for run_generate_config."""
    # Resolve the plugin-provided job class and delegate execution to job_runner.
    job_class = get_job("plugins/nautobot_golden_config.jobs/IntendedJob")
    job_runner(self, job_class, kwargs.get("device"), kwargs.get("user"))
def handle(self, *args, **options):
    """Run the named job synchronously and report its log entries to stdout."""
    if "/" not in options["job"]:
        raise CommandError('Job must be specified in the form "grouping_name/module_name/JobClassName"')
    job_class = get_job(options["job"])
    if not job_class:
        raise CommandError('Job "%s" not found' % options["job"])

    job_content_type = ContentType.objects.get(app_label="extras", model="job")

    # Run the job and create a new JobResult
    self.stdout.write("[{:%H:%M:%S}] Running {}...".format(timezone.now(), job_class.class_path))

    job_result = JobResult.enqueue_job(
        run_job,
        job_class.class_path,
        job_content_type,
        None,
        data={},  # TODO: parsing CLI args into a data dictionary is not currently implemented
        request=None,
        commit=options["commit"],
    )

    # Wait on the job to finish
    while job_result.status not in JobResultStatusChoices.TERMINAL_STATE_CHOICES:
        time.sleep(1)
        job_result = JobResult.objects.get(pk=job_result.pk)

    # Report on success/failure
    for test_name, attrs in job_result.data.items():
        if test_name in ["total", "output"]:
            continue

        self.stdout.write(
            "\t{}: {} success, {} info, {} warning, {} failure".format(
                test_name,
                attrs["success"],
                attrs["info"],
                attrs["warning"],
                attrs["failure"],
            )
        )

        for log_entry in attrs["log"]:
            status = log_entry[1]
            if status == "success":
                status = self.style.SUCCESS(status)
            elif status == "warning":
                status = self.style.WARNING(status)
            elif status == "failure":
                status = self.style.NOTICE(status)
            # "info" is printed unstyled.

            if log_entry[2]:  # object associated with log entry
                self.stdout.write(f"\t\t{status}: {log_entry[2]}: {log_entry[-1]}")
            else:
                self.stdout.write(f"\t\t{status}: {log_entry[-1]}")

    if job_result.data["output"]:
        self.stdout.write(job_result.data["output"])

    if job_result.status == JobResultStatusChoices.STATUS_FAILED:
        status = self.style.ERROR("FAILED")
    elif job_result.status == JobResultStatusChoices.STATUS_ERRORED:
        status = self.style.ERROR("ERRORED")
    else:
        status = self.style.SUCCESS("SUCCESS")
    self.stdout.write("[{:%H:%M:%S}] {}: {}".format(timezone.now(), job_class.class_path, status))

    # Wrap things up
    self.stdout.write(
        "[{:%H:%M:%S}] {}: Duration {}".format(timezone.now(), job_class.class_path, job_result.duration)
    )
    self.stdout.write("[{:%H:%M:%S}] Finished".format(timezone.now()))
def _get_job_class(self, class_path):
    """Resolve `class_path` to a Job class, raising Http404 when it does not exist."""
    job_class = get_job(class_path)
    if job_class is None:
        raise Http404
    return job_class
def handle(self, *args, **options):
    """Run the named job synchronously and report its JobLogEntry records to stdout."""
    if "/" not in options["job"]:
        raise CommandError('Job must be specified in the form "grouping_name/module_name/JobClassName"')
    job_class = get_job(options["job"])
    if not job_class:
        raise CommandError('Job "%s" not found' % options["job"])

    user = None
    request = None

    if options["commit"] and not options["username"]:
        # Job execution with commit=True uses change_logging(), which requires a user as the author of any changes
        raise CommandError("--username is mandatory when --commit is used")

    if options["username"]:
        User = get_user_model()
        try:
            user = User.objects.get(username=options["username"])
        except User.DoesNotExist as exc:
            raise CommandError("No such user") from exc

        request = RequestFactory().request(SERVER_NAME="nautobot_server_runjob")
        request.id = uuid.uuid4()
        request.user = user

    job_content_type = get_job_content_type()

    # Run the job and create a new JobResult
    self.stdout.write("[{:%H:%M:%S}] Running {}...".format(timezone.now(), job_class.class_path))

    job_result = JobResult.enqueue_job(
        run_job,
        job_class.class_path,
        job_content_type,
        user,
        data={},  # TODO: parsing CLI args into a data dictionary is not currently implemented
        request=copy_safe_request(request) if request else None,
        commit=options["commit"],
    )

    # Wait on the job to finish
    while job_result.status not in JobResultStatusChoices.TERMINAL_STATE_CHOICES:
        time.sleep(1)
        job_result = JobResult.objects.get(pk=job_result.pk)

    # Report on success/failure
    groups = set(JobLogEntry.objects.filter(job_result=job_result).values_list("grouping", flat=True))
    for group in sorted(groups):
        logs = JobLogEntry.objects.filter(job_result__pk=job_result.pk, grouping=group)
        success_count = logs.filter(log_level=LogLevelChoices.LOG_SUCCESS).count()
        info_count = logs.filter(log_level=LogLevelChoices.LOG_INFO).count()
        warning_count = logs.filter(log_level=LogLevelChoices.LOG_WARNING).count()
        failure_count = logs.filter(log_level=LogLevelChoices.LOG_FAILURE).count()

        self.stdout.write(
            "\t{}: {} success, {} info, {} warning, {} failure".format(
                group,
                success_count,
                info_count,
                warning_count,
                failure_count,
            )
        )

        for log_entry in logs:
            status = log_entry.log_level
            if status == "success":
                status = self.style.SUCCESS(status)
            elif status == "warning":
                status = self.style.WARNING(status)
            elif status == "failure":
                status = self.style.NOTICE(status)
            # "info" is printed unstyled.

            if log_entry.log_object:
                self.stdout.write(f"\t\t{status}: {log_entry.log_object}: {log_entry.message}")
            else:
                self.stdout.write(f"\t\t{status}: {log_entry.message}")

    if job_result.data["output"]:
        self.stdout.write(job_result.data["output"])

    if job_result.status == JobResultStatusChoices.STATUS_FAILED:
        status = self.style.ERROR("FAILED")
    elif job_result.status == JobResultStatusChoices.STATUS_ERRORED:
        status = self.style.ERROR("ERRORED")
    else:
        status = self.style.SUCCESS("SUCCESS")
    self.stdout.write("[{:%H:%M:%S}] {}: {}".format(timezone.now(), job_class.class_path, status))

    # Wrap things up
    self.stdout.write(
        "[{:%H:%M:%S}] {}: Duration {}".format(timezone.now(), job_class.class_path, job_result.duration)
    )
    self.stdout.write("[{:%H:%M:%S}] Finished".format(timezone.now()))