Example #1
File: keypairs.py Project: Imperat/sahara
def provision_keypairs(cluster, instances=None):
    extra = cluster.extra.to_dict() if cluster.extra else {}
    # use same keypair for scaling
    keypair = extra.get('vanilla_keypair')
    if not instances:
        instances = utils.get_instances(cluster)
    else:
        # scaling
        if not keypair:
            # cluster created before mitaka, skipping provisioning
            return
    if not keypair:
        private, public = crypto.generate_key_pair()
        keypair = {'public': public, 'private': private}
        extra['vanilla_keypair'] = keypair
        extra['vanilla_keypair']['private'] = _store_secret(
            keypair['private'])
        cond.cluster_update(context.ctx(), cluster, {'extra': extra})
    else:
        keypair['private'] = _get_secret(keypair['private'])
    with context.ThreadGroup() as tg:
        for instance in instances:
            tg.spawn(
                'provision-key-%s' % instance.instance_name,
                _provision_key, instance, keypair)
Example #2
    def test_to_paramiko_private_key(self):
        pk_str = c.generate_key_pair()[0]
        pk = c.to_paramiko_private_key(pk_str)

        self.assertIsNotNone(pk)
        self.assertEqual(2048, pk.size)
        self.assertEqual('ssh-rsa', pk.get_name())
Example #3
File: manager.py Project: xinw1012/sahara
    def cluster_create(self, context, values):
        """Create a cluster from the values dictionary."""

        # loading defaults
        merged_values = copy.deepcopy(CLUSTER_DEFAULTS)
        merged_values['tenant_id'] = context.tenant_id

        private_key, public_key = crypto.generate_key_pair()
        merged_values['management_private_key'] = private_key
        merged_values['management_public_key'] = public_key

        cluster_template_id = values.get('cluster_template_id')
        c_tmpl = None

        if cluster_template_id:
            c_tmpl = self.cluster_template_get(context, cluster_template_id)

            del c_tmpl['created_at']
            del c_tmpl['updated_at']
            del c_tmpl['id']

            # updating with cluster_template values
            merged_values.update(c_tmpl)

        # updating with values provided in request
        merged_values.update(values)

        if c_tmpl:
            merged_values['cluster_configs'] = configs.merge_configs(
                c_tmpl.get('cluster_configs'), values.get('cluster_configs'))

        merged_values['node_groups'] = self._populate_node_groups(
            context, merged_values)

        return self.db.cluster_create(context, merged_values)
Example #4
def create_hadoop_ssh_keys(cluster):
    private_key, public_key = crypto.generate_key_pair()
    extra = {
        'hadoop_private_ssh_key': private_key,
        'hadoop_public_ssh_key': public_key
    }
    return conductor.cluster_update(context.ctx(), cluster, {'extra': extra})
Example #5
    def cluster_create(self, context, values):
        """Create a cluster from the values dictionary."""

        # loading defaults
        merged_values = copy.deepcopy(CLUSTER_DEFAULTS)
        merged_values["tenant_id"] = context.tenant_id

        private_key, public_key = crypto.generate_key_pair()
        merged_values["management_private_key"] = private_key
        merged_values["management_public_key"] = public_key

        cluster_template_id = values.get("cluster_template_id")
        c_tmpl = None

        if cluster_template_id:
            c_tmpl = self.cluster_template_get(context, cluster_template_id)

            del c_tmpl["created_at"]
            del c_tmpl["updated_at"]
            del c_tmpl["id"]

            # updating with cluster_template values
            merged_values.update(c_tmpl)

        # updating with values provided in request
        merged_values.update(values)

        if c_tmpl:
            merged_values["cluster_configs"] = configs.merge_configs(
                c_tmpl.get("cluster_configs"), values.get("cluster_configs")
            )

        merged_values["node_groups"] = self._populate_node_groups(context, merged_values)

        return self.db.cluster_create(context, merged_values)
Example #6
    def test_generate_key_pair(self):
        kp = c.generate_key_pair()

        self.assertIsInstance(kp, tuple)
        self.assertIsNotNone(kp[0])
        self.assertIsNotNone(kp[1])
        self.assertIn('-----BEGIN RSA PRIVATE KEY-----', kp[0])
        self.assertIn('-----END RSA PRIVATE KEY-----', kp[0])
        self.assertIn('ssh-rsa ', kp[1])
        self.assertIn('Generated by Sahara', kp[1])
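The test above pins down the observable contract of crypto.generate_key_pair(): it returns a (private_key, public_key) tuple whose first element is a PEM-encoded RSA private key and whose second element is a single OpenSSH "ssh-rsa ..." line ending in a "Generated by Sahara" comment. What follows is only a minimal sketch of a standalone helper with that same contract, assuming paramiko is available (as suggested by to_paramiko_private_key elsewhere on this page); it is not Sahara's actual implementation, which may do the work differently.

import io

import paramiko


def generate_key_pair(key_length=2048):
    # Sketch only, not Sahara's implementation: create an RSA key of the
    # requested size with paramiko.
    key = paramiko.RSAKey.generate(key_length)

    # Serialize the private half as traditional PEM text
    # ('-----BEGIN RSA PRIVATE KEY-----').
    buf = io.StringIO()
    key.write_private_key(buf)
    private_key = buf.getvalue()

    # Build the one-line OpenSSH public key with the comment the test checks.
    public_key = 'ssh-rsa %s Generated by Sahara' % key.get_base64()
    return private_key, public_key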
Example #7
def get_hadoop_ssh_keys(cluster):
    extra = cluster.extra.to_dict() if cluster.extra else {}
    private_key = extra.get('hadoop_private_ssh_key')
    public_key = extra.get('hadoop_public_ssh_key')
    if not private_key or not public_key:
        private_key, public_key = crypto.generate_key_pair()
        extra['hadoop_private_ssh_key'] = private_key
        extra['hadoop_public_ssh_key'] = public_key
        conductor.cluster_update(context.ctx(), cluster, {'extra': extra})

    return private_key, public_key
Example #8
    def setUp(self):
        super(BaseTestCase, self).setUp()
        self._init_clients()
        timeouts.Defaults.init_defaults(self.testcase)
        self.testcase['ssh_username'] = self.sahara.sahara_client.images.get(
            self.nova.get_image_id(self.testcase['image'])).username
        self.private_key, self.public_key = ssh.generate_key_pair()
        self.key_name = self.__create_keypair()
        self.plugin_opts = {
            'plugin_name': self.testcase['plugin_name'],
            'hadoop_version': self.testcase['plugin_version']
        }
        self.template_path = DEFAULT_TEMPLATES_PATH % self.plugin_opts
Example #9
File: base.py Project: snowind/sahara
    def setUp(self):
        super(BaseTestCase, self).setUp()
        self._init_clients()
        timeouts.Defaults.init_defaults(self.testcase)
        self.testcase['ssh_username'] = self.sahara.sahara_client.images.get(
            self.nova.get_image_id(self.testcase['image'])).username
        self.private_key, self.public_key = ssh.generate_key_pair()
        self.key_name = self.__create_keypair()
        self.plugin_opts = {
            'plugin_name': self.testcase['plugin_name'],
            'hadoop_version': self.testcase['plugin_version']
        }
        self.template_path = DEFAULT_TEMPLATES_PATH % self.plugin_opts
Example #10
def get_hadoop_ssh_keys(cluster, extra):
    if cluster.extra:
        extra.update(cluster.extra)
    nextra = {}
    private_key = extra.get('hadoop_private_ssh_key')
    public_key = extra.get('hadoop_public_ssh_key')
    if not private_key or not public_key:
        private_key, public_key = crypto.generate_key_pair()
        nextra['hadoop_private_ssh_key'] = private_key
        nextra['hadoop_public_ssh_key'] = public_key
        conductor.cluster_update(context.ctx(), cluster, {'extra': nextra})
        extra.update(nextra)

    return private_key, public_key
Example #11
def get_hadoop_ssh_keys(cluster, extra):
    if cluster.extra:
        extra.update(cluster.extra)
    nextra = {}
    private_key = extra.get('hadoop_private_ssh_key')
    public_key = extra.get('hadoop_public_ssh_key')
    if not private_key or not public_key:
        private_key, public_key = crypto.generate_key_pair()
        nextra['hadoop_private_ssh_key'] = private_key
        nextra['hadoop_public_ssh_key'] = public_key
        conductor.cluster_update(context.ctx(), cluster, {'extra': nextra})
        extra.update(nextra)

    return private_key, public_key
Example #12
File: base.py Project: jfrodriguez/sahara
    def setUp(self):
        super(BaseTestCase, self).setUp()
        self._init_clients()
        timeouts.Defaults.init_defaults(self.testcase)
        self.testcase['ssh_username'] = self.sahara.sahara_client.images.get(
            self.nova.get_image_id(self.testcase['image'])).username
        self.private_key, self.public_key = ssh.generate_key_pair()
        self.key_name = self.__create_keypair()
        # save the private key if retain_resources is specified
        # (useful for debugging purposes)
        if self.testcase['retain_resources']:
            with open(self.key_name + '.key', 'a') as private_key_file:
                private_key_file.write(self.private_key)
        self.plugin_opts = {
            'plugin_name': self.testcase['plugin_name'],
            'hadoop_version': self.testcase['plugin_version']
        }
        self.template_path = DEFAULT_TEMPLATES_PATH % self.plugin_opts
Example #13
def _create_cluster_mock(node_groups, aa):

    user_kp = mock.Mock()
    user_kp.public_key = "123"
    private_key = c.generate_key_pair()[0]

    dct = {'name': 'test_cluster',
           'plugin_name': 'mock_plugin',
           'hadoop_version': 'mock_version',
           'default_image_id': 'initial',
           'user_keypair_id': 'user_keypair',
           'anti_affinity': aa,
           '_user_kp': user_kp,
           'private_key': private_key,
           'node_groups': node_groups}

    cluster = conductor.cluster_create(context.ctx(), dct)

    return cluster
Example #14
File: base.py Project: uladz/sahara
    def setUp(self):
        super(BaseTestCase, self).setUp()
        self._init_clients()
        timeouts.Defaults.init_defaults(self.testcase)
        self.testcase['ssh_username'] = self.sahara.sahara_client.images.get(
            self.nova.get_image_id(self.testcase['image'])).username
        self.key = self.testcase.get('key_name')
        if self.key is None:
            self.private_key, self.public_key = ssh.generate_key_pair()
            self.key_name = self.__create_keypair()
        # save the private key if retain_resources is specified
        # (useful for debugging purposes)
        if self.testcase['retain_resources'] or self.key is None:
            with open(self.key_name + '.key', 'a') as private_key_file:
                private_key_file.write(self.private_key)
        self.plugin_opts = {
            'plugin_name': self.testcase['plugin_name'],
            'hadoop_version': self.testcase['plugin_version']
        }
        self.template_path = DEFAULT_TEMPLATES_PATH % self.plugin_opts
        self.cinder = True
Example #15
File: base.py Project: egafford/sahara
    def setUp(self):
        super(BaseTestCase, self).setUp()
        self._init_clients()
        timeouts.Defaults.init_defaults(self.testcase)
        self.testcase["ssh_username"] = self.sahara.sahara_client.images.get(
            self.nova.get_image_id(self.testcase["image"])
        ).username
        self.key = self.testcase.get("key_name")
        if self.key is None:
            self.private_key, self.public_key = ssh.generate_key_pair()
            self.key_name = self.__create_keypair()
        # save the private key if retain_resources is specified
        # (useful for debugging purposes)
        if self.testcase["retain_resources"] or self.key is None:
            with open(self.key_name + ".key", "a") as private_key_file:
                private_key_file.write(self.private_key)
        self.plugin_opts = {
            "plugin_name": self.testcase["plugin_name"],
            "hadoop_version": self.testcase["plugin_version"],
        }
        self.template_path = DEFAULT_TEMPLATES_PATH % self.plugin_opts
        self.cinder = True
Example #16
def provision_keypairs(cluster, instances=None):
    extra = cluster.extra.to_dict() if cluster.extra else {}
    # use same keypair for scaling
    keypair = extra.get('vanilla_keypair')
    if not instances:
        instances = utils.get_instances(cluster)
    else:
        # scaling
        if not keypair:
            # cluster created before mitaka, skipping provisioning
            return
    if not keypair:
        private, public = crypto.generate_key_pair()
        keypair = {'public': public, 'private': private}
        extra['vanilla_keypair'] = keypair
        extra['vanilla_keypair']['private'] = _store_secret(keypair['private'])
        cond.cluster_update(context.ctx(), cluster, {'extra': extra})
    else:
        keypair['private'] = _get_secret(keypair['private'])
    with context.ThreadGroup() as tg:
        for instance in instances:
            tg.spawn('provision-key-%s' % instance.instance_name,
                     _provision_key, instance, keypair)
Example #17
def generate_key_pair(key_length=2048, **kwargs):
    return crypto.generate_key_pair(key_length)
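To close, a hedged usage sketch tying the patterns above together: generate a pair, keep the private key on disk (as the scenario tests do when retain_resources is set), and load it back as a paramiko key object via to_paramiko_private_key. The module path sahara.utils.crypto and the file name 'demo.key' are assumptions for illustration only.

from sahara.utils import crypto  # assumed module path for the helpers shown above


def demo_keypair(key_file_name='demo.key'):
    # Generate a 2048-bit RSA key pair: PEM private key text and an
    # 'ssh-rsa ...' public key line.
    private_key, public_key = crypto.generate_key_pair()

    # Persist the private key so the instances stay reachable over SSH,
    # mirroring the '<key_name>.key' files written by the scenario tests.
    with open(key_file_name, 'w') as key_file:
        key_file.write(private_key)

    # The same PEM text can be loaded back as a paramiko key object.
    pk = crypto.to_paramiko_private_key(private_key)
    return pk, public_key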