Example #1
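Provisions cloud nodes for acceptance testing, then derives the cluster ID with make_cluster_id(TestTypes.ACCEPTANCE, _provider_for_cluster_id(self.dataset_backend)) and passes it to generate_certificates() when building the cluster.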
    def start_cluster(self, reactor):
        """
        Provision cloud cluster for acceptance tests.

        :return Cluster: The cluster to connect to for acceptance tests.
        """
        metadata = {
            'purpose': 'acceptance-testing',
            'distribution': self.distribution,
        }
        metadata.update(self.metadata)

        for index in range(self.num_nodes):
            name = "acceptance-test-%s-%d" % (self.creator, index)
            try:
                print "Creating node %d: %s" % (index, name)
                node = self.provisioner.create_node(
                    name=name,
                    distribution=self.distribution,
                    metadata=metadata,
                )
            except:
                print "Error creating node %d: %s" % (index, name)
                print "It may have leaked into the cloud."
                raise

            yield remove_known_host(reactor, node.address)
            self.nodes.append(node)
            del node

        commands = parallel([
            node.provision(package_source=self.package_source,
                           variants=self.variants)
            for node in self.nodes
        ])
        if self.dataset_backend == DatasetBackend.zfs:
            zfs_commands = parallel([
                configure_zfs(node, variants=self.variants)
                for node in self.nodes
            ])
            commands = commands.on(success=lambda _: zfs_commands)

        yield perform(make_dispatcher(reactor), commands)

        cluster = yield configured_cluster_for_nodes(
            reactor,
            generate_certificates(
                make_cluster_id(
                    TestTypes.ACCEPTANCE,
                    _provider_for_cluster_id(self.dataset_backend),
                ),
                self.nodes),
            self.nodes,
            self.dataset_backend,
            self.dataset_backend_configuration,
            _save_backend_configuration(self.dataset_backend,
                                        self.dataset_backend_configuration)
        )

        returnValue(cluster)
Example #2
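A configure callback that assembles the cluster from already-provisioned nodes; the cluster ID is again derived from the ACCEPTANCE test type and the provider for the dataset backend.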
    def configure(ignored):
        return configured_cluster_for_nodes(
            reactor,
            generate_certificates(
                make_cluster_id(TestTypes.ACCEPTANCE, _provider_for_cluster_id(self.dataset_backend)), self._nodes
            ),
            self._nodes,
            self.dataset_backend,
            self.dataset_backend_configuration,
        )
Example #3
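The same configure callback as the previous example, with the make_cluster_id() call wrapped across several lines.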
    def configure(ignored):
        return configured_cluster_for_nodes(
            reactor,
            generate_certificates(
                make_cluster_id(
                    TestTypes.ACCEPTANCE,
                    _provider_for_cluster_id(self.dataset_backend),
                ), self._nodes),
            self._nodes,
            self.dataset_backend,
            self.dataset_backend_configuration,
        )
Example #4
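Wraps the generated cluster ID in a ClusterIdentity value that labels an acceptance-test cluster.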
    def _make_cluster_identity(self, dataset_backend):
        """
        Build a cluster identity based on the parameters.
        """
        cluster_id = make_cluster_id(
            TestTypes.ACCEPTANCE,
            _provider_for_cluster_id(dataset_backend),
        )
        return ClusterIdentity(
            purpose=u'acceptance-testing',
            prefix=u'acceptance-test',
            name=b'acceptance-cluster',
            id=cluster_id,
        )
Example #5
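A test setUp() that builds a cluster ID for functional tests, passing only the test type, and hands it to the DigitalOcean Flocker plugin along with a placeholder token.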
    def setUp(self):
        self._cluster_id = cluster_utils.make_cluster_id(
            cluster_utils.TestTypes.FUNCTIONAL)
        self._api = digitalocean_flocker_plugin.do_from_configuration(
            self._cluster_id, token='this-is-not-a-token')
Example #6
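A variant of Example #1 that appends a random tag to node names so that concurrent runs by the same creator do not produce colliding names.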
    def start_cluster(self, reactor):
        """
        Provision cloud cluster for acceptance tests.

        :return Cluster: The cluster to connect to for acceptance tests.
        """
        metadata = {
            'purpose': 'acceptance-testing',
            'distribution': self.distribution,
        }
        metadata.update(self.metadata)

        # Try to make names unique even if the same creator is starting
        # multiple clusters at the same time.  This lets other code use the
        # name as a way to identify nodes.  This is only necessary in one
        # place, the node creation code, to perform cleanup when the create
        # operation fails in a way such that it isn't clear if the instance has
        # been created or not.
        random_tag = os.urandom(8).encode("base64").strip("\n=")
        print "Assigning random tag:", random_tag

        for index in range(self.num_nodes):
            name = "acceptance-test-%s-%s-%d" % (
                self.creator, random_tag, index,
            )
            try:
                print "Creating node %d: %s" % (index, name)
                node = self.provisioner.create_node(
                    name=name,
                    distribution=self.distribution,
                    metadata=metadata,
                )
            except:
                print "Error creating node %d: %s" % (index, name)
                print "It may have leaked into the cloud."
                raise

            yield remove_known_host(reactor, node.address)
            self.nodes.append(node)
            del node

        commands = parallel([
            node.provision(package_source=self.package_source,
                           variants=self.variants)
            for node in self.nodes
        ])
        if self.dataset_backend == DatasetBackend.zfs:
            zfs_commands = parallel([
                configure_zfs(node, variants=self.variants)
                for node in self.nodes
            ])
            commands = commands.on(success=lambda _: zfs_commands)

        yield perform(make_dispatcher(reactor), commands)

        cluster = yield configured_cluster_for_nodes(
            reactor,
            generate_certificates(
                make_cluster_id(
                    TestTypes.ACCEPTANCE,
                    _provider_for_cluster_id(self.dataset_backend),
                ),
                self.nodes),
            self.nodes,
            self.dataset_backend,
            self.dataset_backend_configuration,
            _save_backend_configuration(self.dataset_backend,
                                        self.dataset_backend_configuration)
        )

        returnValue(cluster)
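Across these call sites, make_cluster_id() is invoked either with a test type alone (Example #5) or with a test type plus a provider derived from the dataset backend (the other examples). A minimal sketch of the simpler form, assuming the cluster_utils module referenced in Example #5 is flocker.testtools.cluster_utils (none of the snippets show the import):

    from flocker.testtools import cluster_utils

    # Build an ID tagged as belonging to a functional-test cluster.  The
    # acceptance-test examples above pass a second, provider argument,
    # which they obtain from _provider_for_cluster_id(self.dataset_backend).
    cluster_id = cluster_utils.make_cluster_id(cluster_utils.TestTypes.FUNCTIONAL)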