Example #1
    def test_main_calls_zero_disk(self):
        self.patch_args(False, False)
        disks = {
            factory.make_name("disk").encode("ascii"): {}
            for _ in range(3)
        }
        self.patch(maas_wipe, "get_disk_info").return_value = disks

        zero_disk = self.patch(maas_wipe, "zero_disk")
        mock_try = self.patch(maas_wipe, "try_secure_erase")
        mock_try.return_value = False
        maas_wipe.main()

        wipe_calls = [
            call(disk)
            for disk in disks.keys()
        ]
        self.assertThat(mock_try, MockNotCalled())
        self.assertThat(zero_disk, MockCallsMatch(*wipe_calls))
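All of the snippets on this page, which appear to be drawn from the MAAS test suite, follow the same shape: a collaborator is replaced with a mock via self.patch(...), the code under test is exercised, and the MockNotCalled() matcher (from MAAS's maastesting test helpers) asserts that the mock was never invoked. The sketch below shows that pattern using only the standard library; do_work, primary, and fallback are hypothetical stand-ins, and assert_not_called() plays the role of MockNotCalled().

from unittest import TestCase, mock


def do_work(primary, fallback):
    # Hypothetical code under test: fall back only if the primary path fails.
    if primary():
        return "primary"
    fallback()
    return "fallback"


class TestDoWork(TestCase):
    def test_fallback_not_called_when_primary_succeeds(self):
        primary = mock.Mock(return_value=True)
        fallback = mock.Mock()

        do_work(primary, fallback)

        primary.assert_called_once_with()
        # Standard-library equivalent of assertThat(fallback, MockNotCalled()).
        fallback.assert_not_called()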
Example #2
    def test_populates_if_tag_is_defined(self):
        post_commit_do = self.patch(tag_module, "post_commit_do")

        tag = Tag(name=factory.make_name("tag"), definition="//foo")
        tag.save(populate=False)

        self.assertTrue(tag.is_defined)
        self.assertThat(post_commit_do, MockNotCalled())
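        # Saving with populate=False defers node population; calling
        # _populate_nodes_later() should schedule it through post_commit_do.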
        tag._populate_nodes_later()
        self.assertThat(
            post_commit_do,
            MockCalledOnceWith(
                reactor.callLater,
                0,
                deferToDatabase,
                populate_tags.populate_tags,
                tag,
            ),
        )
Example #3
    def test_main_calls_try_secure_erase_for_all_disks(self):
        self.patch_args(True, False)
        disks = {
            factory.make_name("disk").encode("ascii"): {}
            for _ in range(3)
        }
        self.patch(maas_wipe, "get_disk_info").return_value = disks

        mock_zero = self.patch(maas_wipe, "zero_disk")
        mock_try = self.patch(maas_wipe, "try_secure_erase")
        mock_try.return_value = True
        maas_wipe.main()

        calls = [
            call(disk, info)
            for disk, info in disks.items()
        ]
        self.assertThat(mock_try, MockCallsMatch(*calls))
        self.assertThat(mock_zero, MockNotCalled())
Example #4
 def test_queueMessages_handled_invalid_nodekey_with_instant_msg(self):
     worker = StatusWorkerService(sentinel.dbtasks)
     mock_processMessage = self.patch(worker, "_processMessage")
     contents = b"These are the contents of the file."
     encoded_content = encode_as_base64(bz2.compress(contents))
     message = self.make_message()
     message["files"] = [
         {
             "path": "sample.txt",
             "encoding": "uuencode",
             "compression": "bzip2",
             "content": encoded_content,
         }
     ]
     nodes_with_tokens = yield deferToDatabase(self.make_nodes_with_tokens)
     node, token = nodes_with_tokens[0]
     yield deferToDatabase(token.delete)
     yield worker.queueMessage(token.key, message)
     self.assertThat(mock_processMessage, MockNotCalled())
Example #5
    def test_list_supported_drives_ignores_iscsiadm_timeout(self):
        mock_print = self.patch(smartctl, 'print')
        mock_check_output = self.patch(smartctl, 'check_output')
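        # The first check_output call (iscsiadm) times out; the remaining
        # values feed the smartctl --scan-open and lsblk calls.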
        mock_check_output.side_effect = [
            TimeoutExpired('iscsiadm', 60),
            b'/dev/sda -d scsi # /dev/sda, SCSI device',
            b'NAME MODEL            SERIAL\n'
            b'sda  HGST HDN724040AL abc123',
        ]
        mock_popen = self.patch(smartctl, 'Popen')
        mock_popen.return_value = Popen(
            ['echo', 'SMART support is: Available'], stdout=PIPE)

        self.assertItemsEqual([['/dev/sda', '-d', 'scsi']],
                              smartctl.list_supported_drives())
        self.assertThat(
            mock_check_output,
            MockCallsMatch(
                call(['sudo', '-n', 'iscsiadm', '-m', 'session', '-P', '3'],
                     timeout=smartctl.TIMEOUT,
                     stderr=DEVNULL),
                call(['sudo', '-n', 'smartctl', '--scan-open'],
                     timeout=smartctl.TIMEOUT),
                call([
                    'lsblk',
                    '--exclude',
                    '1,2,7',
                    '-d',
                    '-l',
                    '-o',
                    'NAME,MODEL,SERIAL',
                    '-x',
                    'NAME',
                ],
                     timeout=smartctl.TIMEOUT,
                     stderr=DEVNULL)))
        self.assertThat(
            mock_popen,
            MockCalledOnceWith(
                ['sudo', '-n', 'smartctl', '-i', '/dev/sda', '-d', 'scsi'],
                stdout=PIPE,
                stderr=DEVNULL))
        self.assertThat(mock_print, MockNotCalled())
Example #6
    def test_run_smartctl_selftest_sets_failure_on_exec_fail_test_start(self):
        drive = factory.make_name('drive')
        test = factory.make_name('test')
        run_smartctl = smartctl.RunSmartCtl([drive], test)
        mock_check_call = self.patch(smartctl, 'check_call')
        mock_check_call.side_effect = CalledProcessError(1, 'smartctl')
        mock_check_output = self.patch(smartctl, 'check_output')

        run_smartctl._run_smartctl_selftest()

        self.assertTrue(run_smartctl.running_test_failed)
        self.assertThat(
            mock_check_call,
            MockCalledOnceWith(
                ['sudo', '-n', 'smartctl', '-s', 'on', '-t', test, drive],
                timeout=smartctl.TIMEOUT,
                stdout=DEVNULL,
                stderr=DEVNULL))
        self.assertThat(mock_check_output, MockNotCalled())
Example #7
 def test_sends_event_later(self):
     send_event = self.patch(tftp_module, "send_node_event_ip_address")
     ip = factory.make_ip_address()
     self.patch(tftp_module.tftp, "get_remote_address").return_value = (
         ip,
         sentinel.port,
     )
     clock = Clock()
     log_request(sentinel.filename, clock)
     self.assertThat(send_event, MockNotCalled())
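     # Advancing the clock fires the delayed call, which should then send
     # exactly one event.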
     clock.advance(0.0)
     self.assertThat(
         send_event,
         MockCalledOnceWith(
             ip_address=ip,
             description=sentinel.filename,
             event_type=EVENT_TYPES.NODE_TFTP_REQUEST,
         ),
     )
Example #8
    def test_redfish_request_raises_error_on_response_code_above_400(self):
        driver = RedfishPowerDriver()
        context = make_context()
        url = driver.get_url(context)
        uri = join(url, b"redfish/v1/Systems")
        headers = driver.make_auth_headers(**context)
        mock_agent = self.patch(redfish_module, "Agent")
        mock_agent.return_value.request = Mock()
        expected_headers = Mock()
        expected_headers.code = HTTPStatus.BAD_REQUEST
        expected_headers.headers = "Testing Headers"
        mock_agent.return_value.request.return_value = succeed(
            expected_headers
        )
        mock_readBody = self.patch(redfish_module, "readBody")

        with ExpectedException(PowerActionError):
            yield driver.redfish_request(b"GET", uri, headers)
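        # The request failed before the response body was read, so readBody
        # must not have been called.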
        self.assertThat(mock_readBody, MockNotCalled())
Example #9
 def test_POST_create_returns_machine_with_matching_power_parameters(self):
     mock_create_machine = self.patch(machines_module, "create_machine")
     hostname = factory.make_name("hostname")
     architecture = make_usable_architecture(self)
     power_type = "ipmi"
     power_parameters = {
         "power_address": factory.make_ip_address(),
         "power_user": factory.make_name("power-user"),
         "power_pass": factory.make_name("power-pass"),
         "power_driver": "LAN_2_0",
         "mac_address": "",
         "power_boot_type": "auto",
     }
     machine = factory.make_Machine(
         hostname=hostname,
         status=NODE_STATUS.NEW,
         architecture="",
         power_type=power_type,
         power_parameters=power_parameters,
     )
     # Simulate creating the MAAS IPMI user
     power_parameters["power_user"] = "******"
     power_parameters["power_pass"] = factory.make_name("power-pass")
     response = self.client.post(
         reverse("machines_handler"),
         {
             "hostname": "maas-enlistment",
             "architecture": architecture,
             "power_type": power_type,
             "mac_addresses": factory.make_mac_address(),
             "power_parameters": json.dumps(power_parameters),
         },
     )
     self.assertEqual(http.client.OK, response.status_code)
     machine = reload_object(machine)
     self.assertEqual(hostname, machine.hostname)
     self.assertEqual(architecture, machine.architecture)
     self.assertDictContainsSubset(machine.bmc.power_parameters,
                                   power_parameters)
     self.assertThat(mock_create_machine, MockNotCalled())
     self.assertEqual(machine.system_id,
                      json_load_bytes(response.content)["system_id"])
Example #10
    def test_changing_status_of_node_emits_event(self):
        self.patch_autospec(power, "update_power_state_of_node_soon")
        old_status = NODE_STATUS.COMMISSIONING
        node = factory.make_Node(status=old_status, power_type="virsh")
        node.status = get_failed_status(old_status)

        with post_commit_hooks:
            node.save()
            # update_power_state_of_node_soon is registered as a post-commit
            # task, so it's not called immediately.
            self.expectThat(
                power.update_power_state_of_node_soon, MockNotCalled()
            )

        # Once post-commit hooks have been fired, it's called.
        post_commit_hooks.fire()
        self.assertThat(
            power.update_power_state_of_node_soon,
            MockCalledOnceWith(node.system_id),
        )
Example #11
 def test_status_erasure_failure_does_not_populate_tags(self):
     populate_tags_for_single_node = self.patch(
         api, "populate_tags_for_single_node"
     )
     node = factory.make_Node(
         interface=True, status=NODE_STATUS.DISK_ERASING
     )
     payload = {
         "event_type": "finish",
         "result": "FAILURE",
         "origin": "curtin",
         "name": "cmd-erase",
         "description": "Erasing disk",
         "timestamp": datetime.utcnow(),
     }
     self.processMessage(node, payload)
     self.assertEqual(
         NODE_STATUS.FAILED_DISK_ERASING, reload_object(node).status
     )
     self.assertThat(populate_tags_for_single_node, MockNotCalled())
Example #12
 def test_open_for_update_uses_backend_as_context_manager(self):
     config_file = self.make_file()
     backend = self.patch(ExampleConfiguration, "backend")
     with ExampleConfiguration.open_for_update(config_file) as config:
         # The backend was opened using open_for_update() too.
         self.assertThat(
             backend.open_for_update, MockCalledOnceWith(config_file)
         )
         # The object returned from backend.open_for_update() has been used
         # as the context manager, providing `config`.
         backend_ctx = backend.open_for_update.return_value
         self.assertThat(
             config.store, Is(backend_ctx.__enter__.return_value)
         )
         # We're within the context, as expected.
         self.assertThat(backend_ctx.__exit__, MockNotCalled())
     # The backend context has also been exited.
     self.assertThat(
         backend_ctx.__exit__, MockCalledOnceWith(None, None, None)
     )
Example #13
 def test_disable_disables_all_signals(self):
     manager = SignalsManager()
     signals = [self.make_Signal(), self.make_Signal()]
     for signal in signals:
         manager.add(signal)
     manager.disable()
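     # disable() should disconnect every registered signal exactly once
     # without ever calling connect.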
     self.assertThat(
         signals,
         AllMatch(
             MatchesAll(
                 AfterPreprocessing(
                     (lambda signal: signal.connect), MockNotCalled()
                 ),
                 AfterPreprocessing(
                     (lambda signal: signal.disconnect),
                     MockCalledOnceWith(),
                 ),
             )
         ),
     )
Example #14
 def test_skips_ipv6_interfaces(self):
     clock = Clock()
     interface_name = factory.make_name("eth")
     interfaces = {
         interface_name: {
             "enabled": True,
             "links": [{"address": "2001:db8::1/64"}],
         }
     }
     mock_interfaces = self.patch(
         dhcp_probe_service, "get_all_interfaces_definition"
     )
     mock_interfaces.return_value = interfaces
     service = DHCPProbeService(sentinel.service, clock)
     try_get_client = self.patch(service, "_tryGetClient")
     try_get_client.getClientNow = Mock()
     probe_interface = self.patch(dhcp_probe_service, "probe_interface")
     yield service.startService()
     yield service.stopService()
     self.assertThat(probe_interface, MockNotCalled())
Example #15
    def test_is_silent_and_does_nothing_when_rack_is_not_recognised(self):
        # Patch the logger in the clusterservice so no log messages are printed
        # because the tests run in debug mode.
        self.patch(common.log, "debug")
        self.useFixture(MAASRootFixture())
        rpc_service, protocol = yield prepareRegion(self)
        protocol.GetControllerType.side_effect = exceptions.NoSuchNode
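        # NoSuchNode means the region does not recognise this rack, so the
        # NTP update should be skipped without logging anything.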
        ntp = external.RackNTP()
        service = make_startable_RackExternalService(
            self, StubClusterClientService(), reactor, [("NTP", ntp)])
        self.patch_autospec(ntp, "_tryUpdate")

        yield service.startService()
        self.addCleanup((yield service.stopService))

        with TwistedLoggerFixture() as logger:
            yield service._tryUpdate()

        self.assertThat(logger.output, Equals(""))
        self.assertThat(ntp._tryUpdate, MockNotCalled())
Example #16
 def test__sends_unicast_beacon(self):
     self.run_command("-v", "-s", "1.1.1.1", "-t", "42", "-p", "4242",
                      "127.0.0.1")
     self.assertThat(
         self.protocol_mock,
         MockCalledOnceWith(
             ANY,
             debug=True,
             interface="1.1.1.1",
             port=4242,
             process_incoming=True,
             interfaces=TEST_INTERFACES,
         ),
     )
     self.assertThat(self.fake_protocol.send_multicast_beacons,
                     MockNotCalled())
     self.assertThat(
         self.fake_protocol.send_beacon,
         MockCalledOnceWith(ANY, ("::ffff:127.0.0.1", 5240)),
     )
Example #17
    def test_schedule_arranges_for_later_run(self):
        # Avoid deferring to the database.
        self.patch(boot_images_module, "deferToDatabase", maybeDeferred)
        # Avoid actually initiating a run.
        self.patch_autospec(RackControllersImporter, "run")

        system_ids = [factory.make_name("system_id") for _ in range(3)]
        sources = [sentinel.source]
        proxy = factory.make_simple_http_url()

        conc = random.randint(1, 9)
        delay = random.randint(1, 9)

        clock = Clock()
        delayed_call = RackControllersImporter.schedule(
            system_ids=system_ids,
            sources=sources,
            proxy=proxy,
            delay=delay,
            concurrency=conc,
            clock=clock,
        )

        # The call is scheduled for `delay` seconds from now.
        self.assertThat(delayed_call, MatchesStructure(time=Equals(delay)))
        self.assertThat(RackControllersImporter.run, MockNotCalled())
        clock.advance(delay)
        self.assertThat(
            RackControllersImporter.run, MockCalledOnceWith(ANY, conc)
        )

        # The system_ids, sources, and proxy were all passed through.
        [importer, _] = RackControllersImporter.run.call_args[0]
        self.assertThat(
            importer,
            MatchesStructure(
                system_ids=Equals(tuple(system_ids)),
                sources=Is(sources),
                proxy=Equals(urlparse(proxy)),
            ),
        )
Example #18
 def test_returns_files_from_cache(self):
     with tempdir() as cache_dir:
         store = FileStore(cache_dir)
         tar_xz, files = self.make_tar_xz(cache_dir)
         sha256, size = self.get_file_info(tar_xz)
         checksums = {'sha256': sha256}
         with open(tar_xz, 'rb') as f:
             content_source = ChecksummingContentSource(f, checksums, size)
             download_resources.extract_archive_tar(
                 store, os.path.basename(tar_xz), sha256, checksums, size,
                 content_source)
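             # The first extraction populates the cache; tarfile.open is
             # patched afterwards, so the second call must be served from
             # the cache.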
             mocked_tar = self.patch(download_resources.tarfile, 'open')
             cached_files = download_resources.extract_archive_tar(
                 store, os.path.basename(tar_xz), sha256, checksums, size,
                 content_source)
             self.assertThat(mocked_tar, MockNotCalled())
             for f, info in files.items():
                 cached_file = os.path.join(cache_dir,
                                            '%s-%s' % (f, sha256))
                 expected_cached_file = (cached_file, f)
                 self.assertIn(expected_cached_file, cached_files)
Example #19
    def test_is_silent_does_nothing_but_saves_config_when_is_region(self):
        self.useFixture(MAASRootFixture())
        rpc_service, _ = yield prepareRegion(self, is_region=True)
        service = ntp.RackNetworkTimeProtocolService(rpc_service, reactor)
        self.patch_autospec(ntp, "configure_rack")  # No-op configuration.

        # There is no most recently applied configuration.
        self.assertThat(service._configuration, Is(None))

        with TwistedLoggerFixture() as logger:
            yield service._tryUpdate()

        # The most recently applied configuration is set, though it was not
        # actually "applied" because this host was configured as a region+rack
        # controller, and the rack should not attempt to manage the NTP server
        # on a region+rack.
        self.assertThat(service._configuration, IsInstance(ntp._Configuration))
        # The configuration was not applied.
        self.assertThat(ntp.configure_rack, MockNotCalled())
        # Nothing was logged; there's no need for lots of chatter.
        self.assertThat(logger.output, Equals(""))
Example #20
 def test_skips_disabled_interfaces(self):
     clock = Clock()
     interface_name = factory.make_name("eth")
     interfaces = {
         interface_name: {
             "enabled": False,
             "links": [{
                 "address": "10.0.0.1/24"
             }]
         }
     }
     mock_interfaces = self.patch(dhcp_probe_service,
                                  'get_all_interfaces_definition')
     mock_interfaces.return_value = interfaces
     service = DHCPProbeService(sentinel.service, clock)
     try_get_client = self.patch(service, '_tryGetClient')
     try_get_client.getClientNow = Mock()
     probe_interface = self.patch(dhcp_probe_service, 'probe_interface')
     yield service.startService()
     yield service.stopService()
     self.assertThat(probe_interface, MockNotCalled())
Example #21
 def test_POST_create_returns_machine_with_matching_power_parameters(self):
     mock_create_machine = self.patch(machines_module, "create_machine")
     hostname = factory.make_name("hostname")
     architecture = make_usable_architecture(self)
     power_type = 'ipmi'
     power_parameters = {
         "power_address": factory.make_ip_address(),
         "power_user": factory.make_name("power-user"),
         "power_pass": factory.make_name("power-pass"),
         "power_driver": 'LAN_2_0',
         "mac_address": '',
         "power_boot_type": 'auto',
         }
     machine = factory.make_Machine(
         hostname=hostname, status=NODE_STATUS.NEW,
         architecture='', power_type=power_type,
         power_parameters=power_parameters)
     # Simulate creating the MAAS IPMI user
     power_parameters["power_user"] = "******"
     power_parameters["power_pass"] = factory.make_name("power-pass")
     response = self.client.post(
         reverse('machines_handler'),
         {
             'hostname': 'maas-enlistment',
             'architecture': architecture,
             'power_type': power_type,
             'mac_addresses': factory.make_mac_address(),
             'power_parameters': json.dumps(power_parameters),
         })
     self.assertEqual(http.client.OK, response.status_code)
     machine = reload_object(machine)
     self.assertEqual(hostname, machine.hostname)
     self.assertEqual(architecture, machine.architecture)
     self.assertDictContainsSubset(
         machine.bmc.power_parameters, power_parameters)
     node_metadata = NodeMetadata.objects.get(node=machine, key='enlisting')
     self.assertEqual(node_metadata.value, 'True')
     self.assertThat(mock_create_machine, MockNotCalled())
     self.assertEqual(
         machine.system_id, json_load_bytes(response.content)['system_id'])
Example #22
    def test_uses_param_runtime(self):
        node, script_set = self.make_node()
        current_time = now()
        script_set.last_ping = current_time
        script_set.save()
        passed_script_result = factory.make_ScriptResult(
            script_set=script_set, status=SCRIPT_STATUS.PASSED
        )
        failed_script_result = factory.make_ScriptResult(
            script_set=script_set, status=SCRIPT_STATUS.FAILED
        )
        pending_script_result = factory.make_ScriptResult(
            script_set=script_set, status=SCRIPT_STATUS.PENDING
        )
        script = factory.make_Script(timeout=timedelta(minutes=2))
        running_script_result = factory.make_ScriptResult(
            script_set=script_set,
            status=SCRIPT_STATUS.RUNNING,
            script=script,
            started=current_time - timedelta(minutes=50),
            parameters={"runtime": {"type": "runtime", "value": 60 * 60}},
        )

        mark_nodes_failed_after_missing_script_timeout(current_time, 20)
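        # The one-hour runtime parameter overrides the script's two-minute
        # timeout, so the run started 50 minutes ago is still within its
        # limit and nothing should be marked failed.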
        node = reload_object(node)

        self.assertEquals(self.status, node.status)
        self.assertThat(self.mock_stop, MockNotCalled())
        self.assertEquals(
            SCRIPT_STATUS.PASSED, reload_object(passed_script_result).status
        )
        self.assertEquals(
            SCRIPT_STATUS.FAILED, reload_object(failed_script_result).status
        )
        self.assertEquals(
            SCRIPT_STATUS.PENDING, reload_object(pending_script_result).status
        )
        self.assertEquals(
            SCRIPT_STATUS.RUNNING, reload_object(running_script_result).status
        )
Example #23
 def test__multicasts_at_most_once_per_five_seconds(self):
     # Note: Always use a random port for testing. (port=0)
     clock = Clock()
     protocol = BeaconingSocketProtocol(
         clock,
         port=0,
         process_incoming=False,
         loopback=True,
         interface="::",
         debug=True,
     )
     # Don't try to send out any replies.
     self.patch(services, "create_beacon_payload")
     monotonic_mock = self.patch(services.time, "monotonic")
     send_mcast_mock = self.patch(protocol, "send_multicast_beacons")
     self.patch(protocol, "send_beacon")
     monotonic_mock.side_effect = [
         # Initial queue
         6,
         # Initial dequeue
         6,
         # Second queue (hasn't yet been 5 seconds)
         10,
         # Third queue
         11,
         # Second dequeue
         11,
     ]
     yield protocol.queueMulticastBeaconing()
     clock.advance(0)
     self.assertThat(send_mcast_mock, MockCalledOnceWith({},
                                                         "advertisement"))
     send_mcast_mock.reset_mock()
     yield protocol.queueMulticastBeaconing()
     yield protocol.queueMulticastBeaconing(solicitation=True)
     clock.advance(4.9)
     self.assertThat(send_mcast_mock, MockNotCalled())
     clock.advance(0.1)
     self.assertThat(send_mcast_mock, MockCalledOnceWith({},
                                                         "solicitation"))
Example #24
    def test_mark_nodes_failed_after_script_overrun(self):
        node, script_set = self.make_node()
        now = datetime.now()
        script_set.last_ping = now
        script_set.save()
        passed_script_result = factory.make_ScriptResult(
            script_set=script_set, status=SCRIPT_STATUS.PASSED)
        failed_script_result = factory.make_ScriptResult(
            script_set=script_set, status=SCRIPT_STATUS.FAILED)
        pending_script_result = factory.make_ScriptResult(
            script_set=script_set, status=SCRIPT_STATUS.PENDING)
        script = factory.make_Script(timeout=timedelta(seconds=60))
        running_script_result = factory.make_ScriptResult(
            script_set=script_set,
            status=SCRIPT_STATUS.RUNNING,
            script=script,
            started=now - timedelta(minutes=10))

        mark_nodes_failed_after_missing_script_timeout()
        node = reload_object(node)

        self.assertEquals(self.failed_status, node.status)
        self.assertEquals(
            "%s has run past it's timeout(%s)" %
            (running_script_result.name,
             str(running_script_result.script.timeout)),
            node.error_description)
        if node.enable_ssh:
            self.assertThat(self.mock_stop, MockNotCalled())
        else:
            self.assertThat(self.mock_stop, MockCalledOnce())
        self.assertEquals(SCRIPT_STATUS.PASSED,
                          reload_object(passed_script_result).status)
        self.assertEquals(SCRIPT_STATUS.FAILED,
                          reload_object(failed_script_result).status)
        self.assertEquals(SCRIPT_STATUS.ABORTED,
                          reload_object(pending_script_result).status)
        self.assertEquals(SCRIPT_STATUS.TIMEDOUT,
                          reload_object(running_script_result).status)
Example #25
    def test_mark_nodes_failed_after_missing_timeout_heartbeat(self):
        node, script_set = self.make_node()
        current_time = now()
        node_timeout = Config.objects.get_config("node_timeout")
        script_set.last_ping = current_time - timedelta(minutes=(node_timeout +
                                                                 1))
        script_set.save()
        script_results = [
            factory.make_ScriptResult(script_set=script_set,
                                      status=SCRIPT_STATUS.PENDING)
            for _ in range(3)
        ]

        mark_nodes_failed_after_missing_script_timeout(current_time,
                                                       node_timeout)
        node = reload_object(node)

        self.assertEquals(self.failed_status, node.status)
        self.assertEquals(
            "Node has not been heard from for the last %s minutes" %
            node_timeout,
            node.error_description,
        )
        self.assertIn(
            call("%s: Has not been heard from for the last %s minutes" %
                 (node.hostname, node_timeout)),
            self.maaslog.call_args_list,
        )
        if node.enable_ssh:
            self.assertThat(self.mock_stop, MockNotCalled())
        else:
            self.assertThat(self.mock_stop, MockCalledOnce())
            self.assertIn(
                call("%s: Stopped because SSH is disabled" % node.hostname),
                self.maaslog.call_args_list,
            )
        for script_result in script_results:
            self.assertEquals(SCRIPT_STATUS.TIMEDOUT,
                              reload_object(script_result).status)
Example #26
    def test_starting_and_stopping(self):
        deferToDatabase = self.patch(publication, "deferToDatabase")

        utcnow = patch_utcnow(self)
        cutoff = utcnow.replace(tzinfo=UTC) - timedelta(days=7)

        dnsgc = publication.DNSPublicationGarbageService()
        dnsgc.clock = clock = Clock()

        dnsgc.startService()
        self.assertTrue(dnsgc.running)
        self.assertTrue(dnsgc._loop.running)
        self.assertThat(deferToDatabase, MockNotCalled())
        self.assertThat(dnsgc._loop.interval, IsExpectedInterval)

        clock.advance(dnsgc._loop.interval)
        self.assertThat(deferToDatabase,
                        MockCalledOnceWith(dnsgc._collectGarbage, cutoff))
        self.assertThat(dnsgc._loop.interval, IsExpectedInterval)

        dnsgc.stopService()
        self.assertFalse(dnsgc.running)
        self.assertFalse(dnsgc._loop.running)
Example #27
    def test_mark_nodes_failed_after_missing_timeout_heartbeat(self):
        node, script_set = self.make_node()
        script_set.last_ping = datetime.now() - timedelta(minutes=11)
        script_set.save()
        script_results = [
            factory.make_ScriptResult(script_set=script_set,
                                      status=SCRIPT_STATUS.PENDING)
            for _ in range(3)
        ]

        mark_nodes_failed_after_missing_script_timeout()
        node = reload_object(node)

        self.assertEquals(self.failed_status, node.status)
        self.assertEquals('Node has missed the last 5 heartbeats',
                          node.error_description)
        if node.enable_ssh:
            self.assertThat(self.mock_stop, MockNotCalled())
        else:
            self.assertThat(self.mock_stop, MockCalledOnce())
        for script_result in script_results:
            self.assertEquals(SCRIPT_STATUS.TIMEDOUT,
                              reload_object(script_result).status)
Example #28
    def test_power_off_not_called_if_off(self):
        system_id = factory.make_name("system_id")
        context = {"power_address": factory.make_name("power_address")}
        extra_headers = {
            factory.make_name("key").encode(): [
                factory.make_name("value").encode()
            ]
            for _ in range(3)
        }
        vm = {
            "node": factory.make_name("node"),
            "type": factory.make_name("type"),
            "vmid": factory.make_name("vmid"),
            "status": "stopped",
        }
        self.patch(self.proxmox, "_login").return_value = succeed(
            extra_headers
        )
        self.patch(self.proxmox, "_find_vm").return_value = succeed(vm)
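        # _find_vm reports the VM as already stopped, so power_off should
        # not issue any webhook request.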

        yield self.proxmox.power_off(system_id, context)

        self.assertThat(self.mock_webhook_request, MockNotCalled())
Example #29
    def test_login_uses_api_token_adds_username(self):
        system_id = factory.make_name("system_id")
        context = {
            "power_address": factory.make_name("power_address"),
            "power_user": factory.make_name("power_user"),
            "power_pass": factory.make_name("power_pass"),
            "power_token_name": factory.make_name("power_token_name"),
            "power_token_secret": factory.make_name("power_token_secret"),
        }

        extra_headers = yield self.proxmox._login(system_id, context)

        self.assertEqual(
            {
                b"Authorization": [
                    f"PVEAPIToken={context['power_user']}!"
                    f"{context['power_token_name']}="
                    f"{context['power_token_secret']}".encode()
                ]
            },
            extra_headers,
        )
        self.assertThat(self.mock_webhook_request, MockNotCalled())
Example #30
    def test_doesnt_reports_services_to_region_when_the_same_status(self):
        # Pretend we're in a production environment.
        self.patch(sms, "is_dev_environment").return_value = False

        protocol, connecting = self.patch_rpc_methods()
        self.addCleanup((yield connecting))

        class ExampleService(AlwaysOnService):
            name = service_name = snap_service_name = (
                factory.make_name("service"))

        service = ExampleService()
        # Inveigle this new service into the service monitor.
        self.addCleanup(service_monitor._services.pop, service.name)
        service_monitor._services[service.name] = service

        state = ServiceState(SERVICE_STATE.ON, "running")
        mock_ensureServices = self.patch(service_monitor, "ensureServices")
        mock_ensureServices.return_value = succeed({
            service.name: state
        })

        client = getRegionClient()
        rpc_service = Mock()
        rpc_service.getClientNow.return_value = succeed(client)
        monitor_service = sms.ServiceMonitorService(
            rpc_service, Clock())
        monitor_service._services = yield monitor_service._buildServices({
            service.name: state
        })

        yield monitor_service.startService()
        yield monitor_service.stopService()
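        # Starting and stopping the service runs one monitoring pass; the
        # state matches what was already reported, so no UpdateServices RPC
        # is expected.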

        self.assertThat(
            protocol.UpdateServices,
            MockNotCalled())