def test_creates_scripts(self):
        load_builtin_scripts()

        for script in BUILTIN_SCRIPTS:
            script_in_db = Script.objects.get(name=script.name)

            # While MAAS allows user scripts to leave these fields blank,
            # builtin scripts should always have values.
            self.assertTrue(script_in_db.title, script.name)
            self.assertTrue(script_in_db.description, script.name)
            self.assertTrue(script_in_db.script.data, script.name)
            self.assertNotEqual([], script_in_db.tags, script.name)

            # These values should always be set by the script loader.
            self.assertEqual(
                "Created by maas-3.0.0",
                script_in_db.script.comment,
                script.name,
            )
            self.assertTrue(script_in_db.default, script.name)
            if (
                script.name in NODE_INFO_SCRIPTS
                and NODE_INFO_SCRIPTS[script.name]["run_on_controller"]
            ):
                self.assertIn("deploy-info", script_in_db.tags)
            else:
                self.assertNotIn("deploy-info", script_in_db.tags)

    def test_update_script(self):
        load_builtin_scripts()
        update_script_values = random.choice(BUILTIN_SCRIPTS)
        script = Script.objects.get(name=update_script_values.name)
        # Fields which we can update
        script.description = factory.make_string()
        script.script_type = factory.pick_choice(SCRIPT_TYPE_CHOICES)
        script.destructive = not script.destructive
        # Put fake old data in to simulate updating a script.
        old_script = VersionedTextFile.objects.create(
            data=factory.make_string())
        script.script = old_script
        # User changeable fields.
        user_tags = [factory.make_name('tag') for _ in range(3)]
        script.tags = copy.deepcopy(user_tags)
        user_timeout = timedelta(random.randint(0, 1000))
        script.timeout = user_timeout
        script.save()

        load_builtin_scripts()
        script = reload_object(script)

        self.assertEquals(update_script_values.name, script.name)
        self.assertEquals(update_script_values.title, script.title)
        self.assertEquals(update_script_values.description, script.description)
        self.assertEquals("Updated by maas-%s" % get_maas_version(),
                          script.script.comment)
        self.assertEquals(update_script_values.script_type, script.script_type)
        self.assertEquals(update_script_values.destructive, script.destructive)
        self.assertEquals(old_script, script.script.previous_version)
        if script.destructive:
            user_tags.append('destructive')
        self.assertEquals(user_tags, script.tags)
        self.assertEquals(user_timeout, script.timeout)
        self.assertTrue(script.default)
Example #3
    def test_sets_url(self):
        load_builtin_scripts()
        rack_controller = factory.make_RackController()
        interfaces = {
            factory.make_name("eth0"): {
                "type": "physical",
                "mac_address": factory.make_mac_address(),
                "parents": [],
                "links": [],
                "enabled": True,
            }
        }
        url = "http://%s/MAAS" % factory.make_name("host")
        rack_registered = register(
            rack_controller.system_id,
            interfaces=interfaces,
            url=urlparse(url),
            is_loopback=False,
        )
        self.assertEqual(url, rack_registered.url)
        rack_registered = register(
            rack_controller.system_id,
            interfaces=interfaces,
            url=urlparse("http://localhost/MAAS/"),
            is_loopback=True,
        )
        self.assertEqual("", rack_registered.url)
Example #4
def inner_start_up(master=False):
    """Startup jobs that must run serialized w.r.t. other starting servers."""
    # Register our MAC data type with psycopg.
    register_mac_type(connection.cursor())

    # All commissioning and testing scripts are stored in the database. For
    # a commissioning ScriptSet to be created, Scripts must exist first. Call
    # this early, only on the master process, to ensure they exist and are
    # only created once. If get_or_create_running_controller() is called
    # before this, it will fail on first run.
    if master:
        load_builtin_scripts()

    # Ensure that this region is represented in the database. The first regiond
    # to pass through inner_start_up on this host can do this; it should NOT
    # be restricted to masters only. This also ensures that the MAAS ID is set
    # on the filesystem; it will be done in a post-commit hook and will thus
    # happen before `locks.startup` is released.
    node = RegionController.objects.get_or_create_running_controller()
    # Update region version
    ControllerInfo.objects.set_version(node, get_running_version())
    # Ensure the UUID is created after creating the running controller.
    RegionController.objects.get_or_create_uuid()

    # Only perform the following if this is the master process for the
    # region controller.
    if master:
        # Freshen the kms SRV records.
        dns_kms_setting_changed()

        # Make sure the commissioning distro series is still a supported LTS.
        commissioning_distro_series = Config.objects.get_config(
            name="commissioning_distro_series"
        )
        ubuntu = UbuntuOS()
        if commissioning_distro_series not in (
            ubuntu.get_supported_commissioning_releases()
        ):
            Config.objects.set_config(
                "commissioning_distro_series",
                ubuntu.get_default_commissioning_release(),
            )
            Notification.objects.create_info_for_admins(
                "Ubuntu %s is no longer a supported commissioning "
                "series. Ubuntu %s has been automatically selected."
                % (
                    commissioning_distro_series,
                    ubuntu.get_default_commissioning_release(),
                ),
                ident="commissioning_release_deprecated",
            )

        with RegionConfiguration.open() as config:
            Config.objects.set_config("maas_url", config.maas_url)

        # Update deprecation notifications if needed
        sync_deprecation_notifications()

    def test_mark_nodes_failed_after_builtin_commiss_script_overrun(self):
        load_builtin_scripts()
        user = factory.make_admin()
        node = factory.make_Node(status=NODE_STATUS.COMMISSIONING, owner=user)
        script_set = ScriptSet.objects.create_commissioning_script_set(node)
        node.current_commissioning_script_set = script_set
        node.save()
        current_time = now()
        script_set.last_ping = current_time
        script_set.save()
        pending_script_results = list(script_set.scriptresult_set.all())
        passed_script_result = pending_script_results.pop()
        passed_script_result.status = SCRIPT_STATUS.PASSED
        passed_script_result.save()
        failed_script_result = pending_script_results.pop()
        failed_script_result.status = SCRIPT_STATUS.FAILED
        failed_script_result.save()
        running_script_result = pending_script_results.pop()
        running_script_result.status = SCRIPT_STATUS.RUNNING
        running_script_result.started = current_time - timedelta(minutes=10)
        running_script_result.save()

        mark_nodes_failed_after_missing_script_timeout(current_time, 20)
        node = reload_object(node)

        self.assertEquals(NODE_STATUS.FAILED_COMMISSIONING, node.status)
        timeout = str(
            Script.objects.get(name=running_script_result.name).timeout)
        self.assertEquals(
            "%s has run past it's timeout(%s)" %
            (running_script_result.name, timeout),
            node.error_description,
        )
        self.assertIn(
            call("%s: %s has run past it's timeout(%s)" %
                 (node.hostname, running_script_result.name, timeout)),
            self.maaslog.call_args_list,
        )
        if node.enable_ssh:
            self.assertThat(self.mock_stop, MockNotCalled())
        else:
            self.assertThat(self.mock_stop, MockCalledOnce())
            self.assertIn(
                call("%s: Stopped because SSH is disabled" % node.hostname),
                self.maaslog.call_args_list,
            )
        self.assertEquals(SCRIPT_STATUS.PASSED,
                          reload_object(passed_script_result).status)
        self.assertEquals(SCRIPT_STATUS.FAILED,
                          reload_object(failed_script_result).status)
        self.assertEquals(SCRIPT_STATUS.TIMEDOUT,
                          reload_object(running_script_result).status)
        for script_result in pending_script_results:
            self.assertEquals(SCRIPT_STATUS.ABORTED,
                              reload_object(script_result).status)
Example #6
    def test_commissions_node(self):
        load_builtin_scripts()
        self.prepare_rack_rpc()

        user = factory.make_User()
        node = factory.make_Node(owner=user)
        self.patch(Node, "_start").return_value = None

        with post_commit_hooks:
            commission_node(node.system_id, user)

        self.assertEqual(NODE_STATUS.COMMISSIONING, reload_object(node).status)
Example #7
def inner_start_up(master=False):
    """Startup jobs that must run serialized w.r.t. other starting servers."""
    # Register our MAC data type with psycopg.
    register_mac_type(connection.cursor())

    # Ensure that this region is represented in the database. The first regiond
    # to pass through inner_start_up on this host can do this; it should NOT
    # be restricted to masters only. This also ensures that the MAAS ID is set
    # on the filesystem; it will be done in a post-commit hook and will thus
    # happen before `locks.startup` is released.
    region = RegionController.objects.get_or_create_running_controller()
    # Ensure the UUID is created after creating the running controller.
    RegionController.objects.get_or_create_uuid()

    # Only perform the following if this is the master process for the
    # region controller.
    if master:
        # Freshen the kms SRV records.
        dns_kms_setting_changed()

        # Add or update all builtin scripts
        load_builtin_scripts()

        # Make sure the commissioning distro series is still a supported LTS.
        commissioning_distro_series = Config.objects.get_config(
            name="commissioning_distro_series")
        ubuntu = UbuntuOS()
        if commissioning_distro_series not in (
                ubuntu.get_supported_commissioning_releases()):
            Config.objects.set_config(
                "commissioning_distro_series",
                ubuntu.get_default_commissioning_release(),
            )
            Notification.objects.create_info_for_admins(
                "Ubuntu %s is no longer a supported commissioning "
                "series. Ubuntu %s has been automatically selected." % (
                    commissioning_distro_series,
                    ubuntu.get_default_commissioning_release(),
                ),
                ident="commissioning_release_deprecated",
            )

        # Update deprecation notifications if needed
        sync_deprecation_notifications()

        # Refresh soon after this transaction is in.
        post_commit_do(reactor.callLater, 0, refreshRegion, region)

        # Create a certificate for the region.
        post_commit_do(reactor.callLater, 0, generate_certificate_if_needed)

    def test_update_script(self):
        load_builtin_scripts()
        update_script_values = random.choice(BUILTIN_SCRIPTS)
        script = Script.objects.get(name=update_script_values.name)

        # Fields which we can update
        orig_title = script.title
        orig_description = script.description
        orig_script_type = script.script_type
        orig_results = script.results
        orig_parameters = script.parameters

        script.title = factory.make_string()
        script.description = factory.make_string()
        script.script_type = factory.pick_choice(SCRIPT_TYPE_CHOICES)
        script.results = [factory.make_name("result")]
        script.parameters = {
            factory.make_name("param"): {"type": "storage"}
        }

        # Put fake old data in to simulate updating a script.
        old_script = VersionedTextFile.objects.create(
            data=factory.make_string())
        script.script = old_script

        # User changeable fields.
        user_tags = [factory.make_name("tag") for _ in range(3)]
        script.tags = copy.deepcopy(user_tags)
        user_timeout = timedelta(random.randint(0, 1000))
        script.timeout = user_timeout
        script.save()

        load_builtin_scripts()
        script = reload_object(script)

        self.assertEquals(orig_title, script.title, script.name)
        self.assertEquals(orig_description, script.description, script.name)
        self.assertEquals(orig_script_type, script.script_type, script.name)
        self.assertDictEqual(orig_results, script.results, script.name)
        self.assertDictEqual(orig_parameters, script.parameters, script.name)

        self.assertThat(script.tags, ContainsAll(user_tags))
        self.assertEquals(user_timeout, script.timeout)

        self.assertEquals(old_script, script.script.previous_version)
        self.assertEquals("Updated by maas-%s" % get_maas_version(),
                          script.script.comment)
        self.assertTrue(script.default)

    def test_creates_scripts(self):
        load_builtin_scripts()

        for script in BUILTIN_SCRIPTS:
            script_in_db = Script.objects.get(name=script.name)
            self.assertEquals(script.title, script_in_db.title)
            self.assertEquals(script.description, script_in_db.description)
            self.assertEquals("Created by maas-%s" % get_maas_version(),
                              script_in_db.script.comment)
            self.assertItemsEqual(script.tags, script_in_db.tags)
            self.assertEquals(script.script_type, script_in_db.script_type)
            self.assertEquals(script.timeout, script_in_db.timeout)
            self.assertEquals(script.destructive, script_in_db.destructive)
            self.assertTrue(script_in_db.default)
Example #10
    def setUp(self):
        super().setUp()
        self.maas_id = None

        def set_maas_id(maas_id):
            self.maas_id = maas_id

        self.set_maas_id = self.patch(regionservice, "set_maas_id")
        self.set_maas_id.side_effect = set_maas_id

        def get_maas_id():
            return self.maas_id

        self.get_maas_id = self.patch(regionservice, "get_maas_id")
        self.get_maas_id.side_effect = get_maas_id
        load_builtin_scripts()

    def test_update_doesnt_revert_script(self):
        load_builtin_scripts()
        update_script_index = random.randint(0, len(BUILTIN_SCRIPTS) - 2)
        update_script_values = BUILTIN_SCRIPTS[update_script_index]
        script = Script.objects.get(name=update_script_values.name)
        # Put fake new data in to simulate another MAAS region updating
        # to a newer version.
        updated_description = factory.make_name('description')
        script.description = updated_description
        new_script = factory.make_string()
        script.script = script.script.update(new_script)
        updated_script_type = factory.pick_choice(SCRIPT_TYPE_CHOICES)
        script.script_type = updated_script_type
        updated_destructive = not script.destructive
        script.destructive = updated_destructive

        # Fake user updates
        user_tags = [factory.make_name('tag') for _ in range(3)]
        script.tags = user_tags
        user_timeout = timedelta(random.randint(0, 1000))
        script.timeout = user_timeout
        script.save()

        # Test that subsequent scripts still get updated
        second_update_script_values = BUILTIN_SCRIPTS[update_script_index + 1]
        second_script = Script.objects.get(
            name=second_update_script_values.name)
        # Put fake old data in to simulate updating a script.
        second_script.title = factory.make_string()
        second_script.save()

        load_builtin_scripts()
        script = reload_object(script)

        self.assertEquals(update_script_values.name, script.name)
        self.assertEquals(update_script_values.title, script.title)
        self.assertEquals(updated_description, script.description)
        self.assertEquals(updated_script_type, script.script_type)
        self.assertEquals(updated_destructive, script.destructive)
        self.assertEquals(new_script, script.script.data)
        self.assertEquals(user_tags, script.tags)
        self.assertEquals(user_timeout, script.timeout)
        self.assertTrue(script.default)

        second_script = reload_object(second_script)
        self.assertEquals(second_update_script_values.title,
                          second_script.title)
Example #12
    def test_creates_scripts(self):
        load_builtin_scripts()

        for script in BUILTIN_SCRIPTS:
            script_in_db = Script.objects.get(name=script.name)

            # While MAAS allows user scripts to leave these fields blank,
            # builtin scripts should always have values.
            self.assertTrue(script_in_db.title, script.name)
            self.assertTrue(script_in_db.description, script.name)
            self.assertTrue(script_in_db.script.data, script.name)
            self.assertThat(script_in_db.tags, Not(Equals([])), script.name)

            # These values should always be set by the script loader.
            self.assertEquals("Created by maas-%s" % get_maas_version(),
                              script_in_db.script.comment, script.name)
            self.assertTrue(script_in_db.default, script.name)
Example #13
    def setUp(self):
        super().setUp()
        self.maas_id = None

        def set_maas_id(maas_id):
            self.maas_id = maas_id

        self.set_maas_id = self.patch(regionservice, "set_maas_id")
        self.set_maas_id.side_effect = set_maas_id

        def get_maas_id():
            return self.maas_id

        self.get_maas_id = self.patch(regionservice, "get_maas_id")
        self.get_maas_id.side_effect = get_maas_id
        self.patch(ipc,
                   "get_all_interface_source_addresses").return_value = set()
        load_builtin_scripts()

    def test_update_removes_deploy_info_tag(self):
        load_builtin_scripts()
        script = (
            Script.objects.filter(default=True)
            .exclude(tags__contains=["deploy-info"])
            .first()
        )

        script.add_tag("deploy-info")
        # Put fake old data in to simulate updating a script.
        old_script = VersionedTextFile.objects.create(
            data=factory.make_string()
        )
        script.script = old_script
        script.save()

        load_builtin_scripts()
        script = reload_object(script)

        self.assertNotIn("deploy-info", script.tags)
Example #15
def inner_start_up():
    """Startup jobs that must run serialized w.r.t. other starting servers."""
    # Register our MAC data type with psycopg.
    register_mac_type(connection.cursor())

    # Ensure that this region is represented in the database. The first regiond
    # to pass through inner_start_up on this host can do this; it should NOT
    # be restricted to masters only. This also ensures that the MAAS ID is set
    # on the filesystem; it will be done in a post-commit hook and will thus
    # happen before `locks.startup` is released.
    region = RegionController.objects.get_or_create_running_controller()

    # Only perform the following if this is the master process for the
    # region controller.
    if is_master_process():
        # Freshen the kms SRV records.
        dns_kms_setting_changed()
        # Add or update all builtin scripts
        load_builtin_scripts()
        # Refresh soon after this transaction is in.
        post_commit_do(reactor.callLater, 0, refreshRegion, region)

    def test_update_tag_unchanged_content(self):
        load_builtin_scripts()
        untagged_script = (
            Script.objects.filter(default=True)
            .exclude(tags__contains=["deploy-info"])
            .first()
        )
        tagged_script = Script.objects.filter(
            default=True, tags__contains=["deploy-info"]
        ).first()

        untagged_script.add_tag("deploy-info")
        untagged_script.save()
        tagged_script.remove_tag("deploy-info")
        tagged_script.save()

        load_builtin_scripts()
        untagged_script = reload_object(untagged_script)
        tagged_script = reload_object(tagged_script)

        self.assertNotIn("deploy-info", untagged_script.tags)
        self.assertIn("deploy-info", tagged_script.tags)
Example #17
def populate_main():
    """Populate the main data all in one transaction."""
    admin = factory.make_admin(username="******",
                               password="******",
                               completed_intro=False)  # noqa
    user1, _ = factory.make_user_with_keys(username="******",
                                           password="******",
                                           completed_intro=False)
    user2, _ = factory.make_user_with_keys(username="******",
                                           password="******",
                                           completed_intro=False)

    # Physical zones.
    zones = [
        factory.make_Zone(name="zone-north"),
        factory.make_Zone(name="zone-south"),
    ]

    # DNS domains.
    domains = [
        Domain.objects.get_default_domain(),
        factory.make_Domain("sample"),
        factory.make_Domain("ubnt"),
    ]

    # Create the fabrics that will be used by the regions, racks,
    # machines, and devices.
    fabric0 = Fabric.objects.get_default_fabric()
    fabric0_untagged = fabric0.get_default_vlan()
    fabric0_vlan10 = factory.make_VLAN(fabric=fabric0, vid=10)
    fabric1 = factory.make_Fabric()
    fabric1_untagged = fabric1.get_default_vlan()
    fabric1_vlan42 = factory.make_VLAN(fabric=fabric1, vid=42)
    empty_fabric = factory.make_Fabric()  # noqa

    # Create some spaces.
    space_mgmt = factory.make_Space("management")
    space_storage = factory.make_Space("storage")
    space_internal = factory.make_Space("internal")
    space_ipv6_testbed = factory.make_Space("ipv6-testbed")

    # Subnets used by regions, racks, machines, and devices.
    subnet_1 = factory.make_Subnet(
        cidr="172.16.1.0/24",
        gateway_ip="172.16.1.1",
        vlan=fabric0_untagged,
        space=space_mgmt,
    )
    subnet_2 = factory.make_Subnet(
        cidr="172.16.2.0/24",
        gateway_ip="172.16.2.1",
        vlan=fabric1_untagged,
        space=space_mgmt,
    )
    subnet_3 = factory.make_Subnet(
        cidr="172.16.3.0/24",
        gateway_ip="172.16.3.1",
        vlan=fabric0_vlan10,
        space=space_storage,
    )
    subnet_4 = factory.make_Subnet(  # noqa
        cidr="172.16.4.0/24",
        gateway_ip="172.16.4.1",
        vlan=fabric0_vlan10,
        space=space_internal,
    )
    subnet_2001_db8_42 = factory.make_Subnet(  # noqa
        cidr="2001:db8:42::/64",
        gateway_ip="",
        vlan=fabric1_vlan42,
        space=space_ipv6_testbed,
    )
    ipv4_subnets = [subnet_1, subnet_2, subnet_3, subnet_4]

    # Static routes on subnets.
    factory.make_StaticRoute(source=subnet_1, destination=subnet_2)
    factory.make_StaticRoute(source=subnet_1, destination=subnet_3)
    factory.make_StaticRoute(source=subnet_1, destination=subnet_4)
    factory.make_StaticRoute(source=subnet_2, destination=subnet_1)
    factory.make_StaticRoute(source=subnet_2, destination=subnet_3)
    factory.make_StaticRoute(source=subnet_2, destination=subnet_4)
    factory.make_StaticRoute(source=subnet_3, destination=subnet_1)
    factory.make_StaticRoute(source=subnet_3, destination=subnet_2)
    factory.make_StaticRoute(source=subnet_3, destination=subnet_4)
    factory.make_StaticRoute(source=subnet_4, destination=subnet_1)
    factory.make_StaticRoute(source=subnet_4, destination=subnet_2)
    factory.make_StaticRoute(source=subnet_4, destination=subnet_3)

    # Load builtin scripts in the database so we can generate fake results
    # below.
    load_builtin_scripts()

    hostname = gethostname()
    region_rack = get_one(
        Node.objects.filter(node_type=NODE_TYPE.REGION_AND_RACK_CONTROLLER,
                            hostname=hostname))
    # If "make run" executes before "make sampledata", the rack may have
    # already registered.
    if region_rack is None:
        region_rack = factory.make_Node(
            node_type=NODE_TYPE.REGION_AND_RACK_CONTROLLER,
            hostname=hostname,
            interface=False,
        )

        # Get list of mac addresses that should be used for the region
        # rack controller. This will make sure the RegionAdvertisingService
        # picks the correct region on first start-up and doesn't create
        # duplicates.
        mac_addresses = get_mac_addresses()

        def get_next_mac():
            try:
                return mac_addresses.pop()
            except IndexError:
                return factory.make_mac_address()

        # Region and rack controller (hostname of dev machine)
        #   eth0     - fabric 0 - untagged
        #   eth1     - fabric 0 - untagged
        #   eth2     - fabric 1 - untagged - 172.16.2.2/24 - static
        #   bond0    - fabric 0 - untagged - 172.16.1.2/24 - static
        #   bond0.10 - fabric 0 - 10       - 172.16.3.2/24 - static
        eth0 = factory.make_Interface(
            INTERFACE_TYPE.PHYSICAL,
            name="eth0",
            node=region_rack,
            vlan=fabric0_untagged,
            mac_address=get_next_mac(),
        )
        eth1 = factory.make_Interface(
            INTERFACE_TYPE.PHYSICAL,
            name="eth1",
            node=region_rack,
            vlan=fabric0_untagged,
            mac_address=get_next_mac(),
        )
        eth2 = factory.make_Interface(
            INTERFACE_TYPE.PHYSICAL,
            name="eth2",
            node=region_rack,
            vlan=fabric1_untagged,
            mac_address=get_next_mac(),
        )
        bond0 = factory.make_Interface(
            INTERFACE_TYPE.BOND,
            name="bond0",
            node=region_rack,
            vlan=fabric0_untagged,
            parents=[eth0, eth1],
            mac_address=eth0.mac_address,
        )
        bond0_10 = factory.make_Interface(
            INTERFACE_TYPE.VLAN,
            node=region_rack,
            vlan=fabric0_vlan10,
            parents=[bond0],
        )
        factory.make_StaticIPAddress(
            alloc_type=IPADDRESS_TYPE.STICKY,
            ip="172.16.1.2",
            subnet=subnet_1,
            interface=bond0,
        )
        factory.make_StaticIPAddress(
            alloc_type=IPADDRESS_TYPE.STICKY,
            ip="172.16.2.2",
            subnet=subnet_2,
            interface=eth2,
        )
        factory.make_StaticIPAddress(
            alloc_type=IPADDRESS_TYPE.STICKY,
            ip="172.16.3.2",
            subnet=subnet_3,
            interface=bond0_10,
        )
        fabric0_untagged.primary_rack = region_rack
        fabric0_untagged.save()
        fabric1_untagged.primary_rack = region_rack
        fabric1_untagged.save()
        fabric0_vlan10.primary_rack = region_rack
        fabric0_vlan10.save()

    # Rack controller (happy-rack)
    #   eth0     - fabric 0 - untagged
    #   eth1     - fabric 0 - untagged
    #   eth2     - fabric 1 - untagged - 172.16.2.3/24 - static
    #   bond0    - fabric 0 - untagged - 172.16.1.3/24 - static
    #   bond0.10 - fabric 0 - 10       - 172.16.3.3/24 - static
    rack = factory.make_Node(
        node_type=NODE_TYPE.RACK_CONTROLLER,
        hostname="happy-rack",
        interface=False,
    )
    eth0 = factory.make_Interface(INTERFACE_TYPE.PHYSICAL,
                                  name="eth0",
                                  node=rack,
                                  vlan=fabric0_untagged)
    eth1 = factory.make_Interface(INTERFACE_TYPE.PHYSICAL,
                                  name="eth1",
                                  node=rack,
                                  vlan=fabric0_untagged)
    eth2 = factory.make_Interface(INTERFACE_TYPE.PHYSICAL,
                                  name="eth2",
                                  node=rack,
                                  vlan=fabric1_untagged)
    bond0 = factory.make_Interface(
        INTERFACE_TYPE.BOND,
        name="bond0",
        node=rack,
        vlan=fabric0_untagged,
        parents=[eth0, eth1],
    )
    bond0_10 = factory.make_Interface(INTERFACE_TYPE.VLAN,
                                      node=rack,
                                      vlan=fabric0_vlan10,
                                      parents=[bond0])
    factory.make_StaticIPAddress(
        alloc_type=IPADDRESS_TYPE.STICKY,
        ip="172.16.1.3",
        subnet=subnet_1,
        interface=bond0,
    )
    factory.make_StaticIPAddress(
        alloc_type=IPADDRESS_TYPE.STICKY,
        ip="172.16.2.3",
        subnet=subnet_2,
        interface=eth2,
    )
    factory.make_StaticIPAddress(
        alloc_type=IPADDRESS_TYPE.STICKY,
        ip="172.16.3.3",
        subnet=subnet_3,
        interface=bond0_10,
    )
    fabric0_untagged.secondary_rack = rack
    fabric0_untagged.save()
    fabric1_untagged.secondary_rack = rack
    fabric1_untagged.save()
    fabric0_vlan10.secondary_rack = rack
    fabric0_vlan10.save()

    # Region controller (happy-region)
    #   eth0     - fabric 0 - untagged
    #   eth1     - fabric 0 - untagged
    #   eth2     - fabric 1 - untagged - 172.16.2.4/24 - static
    #   bond0    - fabric 0 - untagged - 172.16.1.4/24 - static
    #   bond0.10 - fabric 0 - 10       - 172.16.3.4/24 - static
    region = factory.make_Node(
        node_type=NODE_TYPE.REGION_CONTROLLER,
        hostname="happy-region",
        interface=False,
    )
    eth0 = factory.make_Interface(
        INTERFACE_TYPE.PHYSICAL,
        name="eth0",
        node=region,
        vlan=fabric0_untagged,
    )
    eth1 = factory.make_Interface(
        INTERFACE_TYPE.PHYSICAL,
        name="eth1",
        node=region,
        vlan=fabric0_untagged,
    )
    eth2 = factory.make_Interface(
        INTERFACE_TYPE.PHYSICAL,
        name="eth2",
        node=region,
        vlan=fabric1_untagged,
    )
    bond0 = factory.make_Interface(
        INTERFACE_TYPE.BOND,
        name="bond0",
        node=region,
        vlan=fabric0_untagged,
        parents=[eth0, eth1],
    )
    bond0_10 = factory.make_Interface(INTERFACE_TYPE.VLAN,
                                      node=region,
                                      vlan=fabric0_vlan10,
                                      parents=[bond0])
    factory.make_StaticIPAddress(
        alloc_type=IPADDRESS_TYPE.STICKY,
        ip="172.16.1.4",
        subnet=subnet_1,
        interface=bond0,
    )
    factory.make_StaticIPAddress(
        alloc_type=IPADDRESS_TYPE.STICKY,
        ip="172.16.2.4",
        subnet=subnet_2,
        interface=eth2,
    )
    factory.make_StaticIPAddress(
        alloc_type=IPADDRESS_TYPE.STICKY,
        ip="172.16.3.4",
        subnet=subnet_3,
        interface=bond0_10,
    )

    # Create one machine for every status. Each machine has a random interface
    # and storage configuration.
    node_statuses = [
        (name, status)
        for name, status in map_enum(NODE_STATUS).items()
        if status not in
        [NODE_STATUS.MISSING, NODE_STATUS.RESERVED, NODE_STATUS.RETIRED]
    ]
    machines = []
    test_scripts = [
        script.name
        for script in Script.objects.filter(script_type=SCRIPT_TYPE.TESTING)
    ]
    for _, status in node_statuses:
        owner = None
        if status in ALLOCATED_NODE_STATUSES:
            owner = random.choice([admin, user1, user2])
        elif status in [
                NODE_STATUS.COMMISSIONING,
                NODE_STATUS.FAILED_RELEASING,
        ]:
            owner = admin

        machine = factory.make_Node(
            status=status,
            owner=owner,
            zone=random.choice(zones),
            interface=False,
            with_boot_disk=False,
            power_type="manual",
            domain=random.choice(domains),
            memory=random.choice([1024, 4096, 8192]),
            description=random.choice([
                "",
                "Scheduled for removeal",
                "Firmware old",
                "Earmarked for Project Fuse in April",
            ]),
            cpu_count=random.randint(2, 8),
        )
        machine.set_random_hostname()
        machines.append(machine)

        # Create random network configuration.
        RandomInterfaceFactory.create_random(machine)

        # Add random storage devices and set a random layout.
        for _ in range(random.randint(1, 5)):
            factory.make_PhysicalBlockDevice(
                node=machine,
                size=random.randint(LARGE_BLOCK_DEVICE,
                                    LARGE_BLOCK_DEVICE * 10),
            )
        if status in [
                NODE_STATUS.READY,
                NODE_STATUS.ALLOCATED,
                NODE_STATUS.DEPLOYING,
                NODE_STATUS.DEPLOYED,
                NODE_STATUS.FAILED_DEPLOYMENT,
                NODE_STATUS.RELEASING,
                NODE_STATUS.FAILED_RELEASING,
        ]:
            machine.set_storage_layout(
                random.choice([
                    layout for layout in STORAGE_LAYOUTS.keys()
                    if layout != "vmfs6"
                ]))
            if status != NODE_STATUS.READY:
                machine._create_acquired_filesystems()

        # Add a random amount of events.
        for _ in range(random.randint(25, 100)):
            factory.make_Event(node=machine)

        # Add in commissioning and testing results.
        if status != NODE_STATUS.NEW:
            for _ in range(0, random.randint(1, 10)):
                css = ScriptSet.objects.create_commissioning_script_set(
                    machine)
                scripts = set()
                for __ in range(1, len(test_scripts)):
                    scripts.add(random.choice(test_scripts))
                tss = ScriptSet.objects.create_testing_script_set(
                    machine, list(scripts))
            machine.current_commissioning_script_set = css
            machine.current_testing_script_set = tss
            machine.save()

        # Fill in historic results
        for script_set in machine.scriptset_set.all():
            if script_set in [css, tss]:
                continue
            for script_result in script_set:
                # Can't use script_result.store_result as it will try to
                # process the result and fail on the fake data.
                script_result.exit_status = random.randint(0, 255)
                if script_result.exit_status == 0:
                    script_result.status = SCRIPT_STATUS.PASSED
                else:
                    script_result.status = random.choice(
                        list(SCRIPT_STATUS_FAILED))
                script_result.started = factory.make_date()
                script_result.ended = script_result.started + timedelta(
                    seconds=random.randint(0, 10000))
                script_result.stdout = Bin(
                    factory.make_string().encode("utf-8"))
                script_result.stderr = Bin(
                    factory.make_string().encode("utf-8"))
                script_result.output = Bin(
                    factory.make_string().encode("utf-8"))
                script_result.save()

        # Only add results in states where commissioning should be completed.
        if status not in [NODE_STATUS.NEW, NODE_STATUS.COMMISSIONING]:
            if status == NODE_STATUS.FAILED_COMMISSIONING:
                exit_status = random.randint(1, 255)
                script_status = random.choice(list(SCRIPT_STATUS_FAILED))
            else:
                exit_status = 0
                script_status = SCRIPT_STATUS.PASSED
            for script_result in css:
                # Can't use script_result.store_result as it will try to
                # process the result and fail on the fake data.
                script_result.status = script_status
                script_result.exit_status = exit_status
                script_result.started = factory.make_date()
                script_result.ended = script_result.started + timedelta(
                    seconds=random.randint(0, 10000))
                script_result.stdout = Bin(
                    factory.make_string().encode("utf-8"))
                script_result.stderr = Bin(
                    factory.make_string().encode("utf-8"))
                script_result.output = Bin(
                    factory.make_string().encode("utf-8"))
                script_result.save()
        elif status == NODE_STATUS.COMMISSIONING:
            for script_result in css:
                script_result.status = random.choice(
                    list(SCRIPT_STATUS_RUNNING_OR_PENDING))
                if script_result.status != SCRIPT_STATUS.PENDING:
                    script_result.started = factory.make_date()
                script_result.save()

        # Only add in results in states where testing should be completed.
        if status not in [NODE_STATUS.NEW, NODE_STATUS.TESTING]:
            if status == NODE_STATUS.FAILED_TESTING:
                exit_status = random.randint(1, 255)
                script_status = random.choice(list(SCRIPT_STATUS_FAILED))
            else:
                exit_status = 0
                script_status = SCRIPT_STATUS.PASSED
            for script_result in tss:
                # Can't use script_result.store_result as it will try to
                # process the result and fail on the fake data.
                script_result.status = script_status
                script_result.exit_status = exit_status
                script_result.started = factory.make_date()
                script_result.ended = script_result.started + timedelta(
                    seconds=random.randint(0, 10000))
                script_result.stdout = Bin(
                    factory.make_string().encode("utf-8"))
                script_result.stderr = Bin(
                    factory.make_string().encode("utf-8"))
                script_result.output = Bin(
                    factory.make_string().encode("utf-8"))
                script_result.save()
        elif status == NODE_STATUS.TESTING:
            for script_result in tss:
                script_result.status = random.choice(
                    list(SCRIPT_STATUS_RUNNING_OR_PENDING))
                if script_result.status != SCRIPT_STATUS.PENDING:
                    script_result.started = factory.make_date()
                script_result.save()

        # Add installation results.
        if status in [
                NODE_STATUS.DEPLOYING,
                NODE_STATUS.DEPLOYED,
                NODE_STATUS.FAILED_DEPLOYMENT,
        ]:
            script_set = ScriptSet.objects.create_installation_script_set(
                machine)
            machine.current_installation_script_set = script_set
            machine.save()

        if status == NODE_STATUS.DEPLOYED:
            for script_result in machine.current_installation_script_set:
                stdout = factory.make_string().encode("utf-8")
                script_result.store_result(0, stdout)
        elif status == NODE_STATUS.FAILED_DEPLOYMENT:
            for script_result in machine.current_installation_script_set:
                exit_status = random.randint(1, 255)
                stdout = factory.make_string().encode("utf-8")
                stderr = factory.make_string().encode("utf-8")
                script_result.store_result(exit_status, stdout, stderr)

        # Add children devices to the deployed machine.
        if status == NODE_STATUS.DEPLOYED:
            boot_interface = machine.get_boot_interface()
            for _ in range(5):
                device = factory.make_Device(
                    interface=True,
                    domain=machine.domain,
                    parent=machine,
                    vlan=boot_interface.vlan,
                )
                device.set_random_hostname()
                RandomInterfaceFactory.assign_ip(
                    device.get_boot_interface(),
                    alloc_type=IPADDRESS_TYPE.STICKY,
                )

    # Create a few pods and assign a random set of the machines to them.
    pods = [None]
    pod_storage_pools = defaultdict(list)
    machines_in_pods = defaultdict(list)
    for _ in range(3):
        subnet = random.choice(ipv4_subnets)
        ip = factory.pick_ip_in_Subnet(subnet)
        ip_address = factory.make_StaticIPAddress(
            alloc_type=IPADDRESS_TYPE.STICKY, ip=ip, subnet=subnet)
        power_address = "qemu+ssh://ubuntu@%s/system" % ip
        pod = factory.make_Pod(
            pod_type="virsh",
            parameters={"power_address": power_address},
            ip_address=ip_address,
            capabilities=[
                Capabilities.DYNAMIC_LOCAL_STORAGE,
                Capabilities.COMPOSABLE,
            ],
        )
        for _ in range(3):
            pool = factory.make_PodStoragePool(pod)
            pod_storage_pools[pod].append(pool)
        pod.default_storage_pool = pool
        pod.save()
        pods.append(pod)
    for _ in range(3):
        subnet = random.choice(ipv4_subnets)
        ip = factory.pick_ip_in_Subnet(subnet)
        ip_address = factory.make_StaticIPAddress(
            alloc_type=IPADDRESS_TYPE.STICKY, ip=ip, subnet=subnet)
        power_address = "%s" % ip
        pod = factory.make_Pod(
            pod_type="rsd",
            parameters={
                "power_address": power_address,
                "power_user": "******",
                "power_pass": "******",
            },
            ip_address=ip_address,
            capabilities=[
                Capabilities.DYNAMIC_LOCAL_STORAGE,
                Capabilities.COMPOSABLE,
            ],
        )
        for _ in range(3):
            pool = factory.make_PodStoragePool(pod)
            pod_storage_pools[pod].append(pool)
        pod.default_storage_pool = pool
        pod.save()
        pods.append(pod)
    for machine in machines:
        # Add the machine to a pod if it's the machine's lucky day!
        pod = random.choice(pods)
        if pod is not None:
            machine.bmc = pod
            machine.instance_power_parameters = {"power_id": machine.hostname}
            machine.save()
            machines_in_pods[pod].append(machine)

            # Assign the block devices on the machine to a storage pool.
            for block_device in machine.physicalblockdevice_set.all():
                block_device.storage_pool = random.choice(
                    pod_storage_pools[pod])
                block_device.save()

    # Update each pod's attributes so that it has more available than used.
    for pod in pods[1:]:
        pod.cores = pod.get_used_cores() + random.randint(4, 8)
        pod.memory = pod.get_used_memory() + random.choice(
            [1024, 2048, 4096, 4096 * 4, 4096 * 8])
        pod.local_storage = sum(pool.storage
                                for pool in pod_storage_pools[pod])
        pod.save()

    # Create a few devices.
    for _ in range(10):
        device = factory.make_Device(interface=True)
        device.set_random_hostname()

    # Add some DHCP snippets.
    # - Global
    factory.make_DHCPSnippet(
        name="foo class",
        description="adds class for vender 'foo'",
        value=VersionedTextFile.objects.create(data=dedent("""\
            class "foo" {
                match if substring (
                    option vendor-class-identifier, 0, 3) = "foo";
            }
        """)),
    )
    factory.make_DHCPSnippet(
        name="bar class",
        description="adds class for vender 'bar'",
        value=VersionedTextFile.objects.create(data=dedent("""\
            class "bar" {
                match if substring (
                    option vendor-class-identifier, 0, 3) = "bar";
            }
        """)),
        enabled=False,
    )
    # - Subnet
    factory.make_DHCPSnippet(
        name="600 lease time",
        description="changes lease time to 600 secs.",
        value=VersionedTextFile.objects.create(data="default-lease-time 600;"),
        subnet=subnet_1,
    )
    factory.make_DHCPSnippet(
        name="7200 max lease time",
        description="changes max lease time to 7200 secs.",
        value=VersionedTextFile.objects.create(data="max-lease-time 7200;"),
        subnet=subnet_2,
        enabled=False,
    )
    # - Node
    factory.make_DHCPSnippet(
        name="boot from other server",
        description="instructs device to boot from other server",
        value=VersionedTextFile.objects.create(data=dedent("""\
            filename "test-boot";
            server-name "boot.from.me";
        """)),
        node=device,
    )

    # Add notifications for admins, users, and each individual user, and for
    # each notification category.
    factory.make_Notification(
        "Attention admins! Core critical! Meltdown imminent! Evacuate "
        "habitat immediately!",
        admins=True,
        category="error",
    )
    factory.make_Notification(
        "Dear users, rumours of a core meltdown are unfounded. Please "
        "return to your home-pods and places of business.",
        users=True,
        category="warning",
    )
    factory.make_Notification(
        "FREE! For the next 2 hours get FREE blueberry and iodine pellets "
        "at the nutri-dispensers.",
        users=True,
        category="success",
    )
    for user in User.objects.all():
        context = {"name": user.username.capitalize()}
        factory.make_Notification(
            "Greetings, {name}! Get away from the habitat for the weekend and "
            "visit the Mare Nubium with MAAS Tours. Use the code METAL to "
            "claim a special gift!",
            user=user,
            context=context,
            category="info",
        )
Example #18
    def setUp(self):
        super().setUp()
        load_builtin_scripts()

    def test_update_doesnt_revert_script(self):
        load_builtin_scripts()
        update_script_index = random.randint(0, len(BUILTIN_SCRIPTS) - 2)
        update_script_values = BUILTIN_SCRIPTS[update_script_index]
        script = Script.objects.get(name=update_script_values.name)
        # Put fake new data in to simulate another MAAS region updating
        # to a newer version.
        new_script = factory.make_string()
        script.script = script.script.update(new_script)

        # Fake user updates
        user_tags = [factory.make_name("tag") for _ in range(3)]
        script.tags = user_tags
        user_timeout = timedelta(random.randint(0, 1000))
        script.timeout = user_timeout
        script.save()

        # Test that subsequent scripts still get updated
        second_update_script_values = BUILTIN_SCRIPTS[update_script_index + 1]
        second_script = Script.objects.get(
            name=second_update_script_values.name)
        # Put fake old data in to simulate updating a script.
        orig_title = second_script.title
        orig_description = second_script.description
        orig_script_type = second_script.script_type
        orig_results = second_script.results
        orig_parameters = second_script.parameters

        second_script.title = factory.make_string()
        second_script.description = factory.make_string()
        second_script.script_type = factory.pick_choice(SCRIPT_TYPE_CHOICES)
        second_script.results = [factory.make_name("result")]
        second_script.parameters = {
            factory.make_name("param"): {"type": "storage"}
        }

        # Put fake old data in to simulate updating a script.
        old_script = VersionedTextFile.objects.create(
            data=factory.make_string())
        second_script.script = old_script

        second_script.save()

        load_builtin_scripts()

        script = reload_object(script)
        self.assertEquals(update_script_values.name, script.name)
        self.assertEquals(new_script, script.script.data)
        self.assertTrue(all(tag in script.tags for tag in user_tags))
        self.assertEquals(user_timeout, script.timeout)
        self.assertTrue(script.default)

        second_script = reload_object(second_script)
        self.assertEquals(orig_title, second_script.title)
        self.assertEquals(orig_description, second_script.description)
        self.assertEquals(orig_script_type, second_script.script_type)
        self.assertDictEqual(orig_results, second_script.results)
        self.assertDictEqual(orig_parameters, second_script.parameters)
        self.assertEquals(old_script, second_script.script.previous_version)
        self.assertEquals(
            "Updated by maas-%s" % get_maas_version(),
            second_script.script.comment,
        )
        self.assertTrue(second_script.default)
Example #20
    def test_always_has_current_commissioning_script_set(self):
        load_builtin_scripts()
        hostname = factory.make_name("hostname")
        register(hostname=hostname)
        rack = RackController.objects.get(hostname=hostname)
        self.assertIsNotNone(rack.current_commissioning_script_set)