def cluster_create(self, context, values):
    """Create a cluster from the values dictionary."""
    # Seed from a deep copy so the shared defaults dict is never mutated.
    data = copy.deepcopy(CLUSTER_DEFAULTS)
    data["tenant_id"] = context.tenant_id

    # Every cluster receives its own management SSH key pair.
    private_key, public_key = crypto.generate_key_pair()
    data["management_private_key"] = private_key
    data["management_public_key"] = public_key

    template = None
    template_id = values.get("cluster_template_id")
    if template_id:
        template = self.cluster_template_get(context, template_id)
        # Strip DB bookkeeping fields before merging.
        del template["created_at"]
        del template["updated_at"]
        del template["id"]
        # Layer template values over the defaults.
        data.update(template)

    # Request-supplied values win over both defaults and template.
    data.update(values)

    if template:
        # Nested config dicts are merged rather than overwritten.
        data["cluster_configs"] = configs.merge_configs(
            template.get("cluster_configs"), values.get("cluster_configs"))

    data["node_groups"] = self._populate_node_groups(context, data)
    return self.db.cluster_create(context, data)
def test_to_paramiko_private_key(self):
    """A freshly generated private key parses into a 2048-bit RSA paramiko key."""
    generated = c.generate_key_pair()[0]
    parsed = c.to_paramiko_private_key(generated)
    self.assertIsNotNone(parsed)
    self.assertEqual(2048, parsed.size)
    self.assertEqual('ssh-rsa', parsed.get_name())
def create_hadoop_ssh_keys(cluster):
    """Generate a Hadoop SSH key pair and persist it in the cluster's extra."""
    private_key, public_key = crypto.generate_key_pair()
    return conductor.cluster_update(
        context.ctx(), cluster,
        {'extra': {'hadoop_private_ssh_key': private_key,
                   'hadoop_public_ssh_key': public_key}})
def cluster_create(self, context, values):
    """Create a cluster from the values dictionary."""
    # Work on a private copy with the configured defaults filled in.
    merged = _apply_defaults(copy.deepcopy(values), CLUSTER_DEFAULTS)
    merged['tenant_id'] = context.tenant_id

    key_pair = crypto.generate_key_pair()
    merged['management_private_key'] = key_pair[0]
    merged['management_public_key'] = key_pair[1]

    template_id = merged.get('cluster_template_id')
    if template_id:
        template = self.cluster_template_get(context, template_id)
        if template:
            # Copy the template before stripping DB bookkeeping fields
            # so the fetched object itself is left untouched.
            base = template.copy()
            for field in ('created_at', 'updated_at', 'id'):
                del base[field]
            # Request values override template values...
            base.update(merged)
            # ...but the nested config dicts are merged, not replaced.
            base['cluster_configs'] = configs.merge_configs(
                template.get('cluster_configs'),
                merged.get('cluster_configs'))
            merged = base

    self._populate_node_groups(context, merged)
    return self.db.cluster_create(context, merged)
def cluster_create(self, context, values):
    """Create a cluster from the values dictionary.

    Merge precedence, lowest to highest: CLUSTER_DEFAULTS, the referenced
    cluster template (if any), then the request values. The nested
    'cluster_configs' dicts from template and request are deep-merged
    rather than overwritten.
    """
    #loading defaults
    merged_values = copy.deepcopy(CLUSTER_DEFAULTS)
    merged_values['tenant_id'] = context.tenant_id

    # Each cluster gets its own management SSH key pair.
    private_key, public_key = crypto.generate_key_pair()
    merged_values['management_private_key'] = private_key
    merged_values['management_public_key'] = public_key

    cluster_template_id = values.get('cluster_template_id')
    c_tmpl = None

    if cluster_template_id:
        # Copy before stripping the DB bookkeeping fields so the object
        # returned by cluster_template_get is not mutated (matches the
        # copy-first convention of the other cluster_create variant).
        c_tmpl = self.cluster_template_get(
            context, cluster_template_id).copy()
        del c_tmpl['created_at']
        del c_tmpl['updated_at']
        del c_tmpl['id']

        #updating with cluster_template values
        merged_values.update(c_tmpl)

    #updating with values provided in request
    merged_values.update(values)

    if c_tmpl:
        # Merge, rather than replace, the nested config dictionaries.
        merged_values['cluster_configs'] = configs.merge_configs(
            c_tmpl.get('cluster_configs'),
            values.get('cluster_configs'))

    merged_values['node_groups'] = \
        self._populate_node_groups(context, merged_values)

    return self.db.cluster_create(context, merged_values)
def test_generate_key_pair(self):
    """generate_key_pair yields a (PEM private key, OpenSSH public key) tuple."""
    pair = c.generate_key_pair()
    self.assertIsInstance(pair, tuple)
    private, public = pair[0], pair[1]
    self.assertIsNotNone(private)
    self.assertIsNotNone(public)
    self.assertIn('-----BEGIN RSA PRIVATE KEY-----', private)
    self.assertIn('-----END RSA PRIVATE KEY-----', private)
    self.assertIn('ssh-rsa ', public)
    self.assertIn('Generated by Savanna', public)
def get_hadoop_ssh_keys(cluster):
    """Return the cluster's Hadoop SSH key pair, creating it on first use."""
    extra = cluster.extra or {}
    priv = extra.get('hadoop_private_ssh_key')
    pub = extra.get('hadoop_public_ssh_key')

    if priv and pub:
        return priv, pub

    # Lazily generate and persist the pair the first time it is needed.
    priv, pub = crypto.generate_key_pair()
    extra['hadoop_private_ssh_key'] = priv
    extra['hadoop_public_ssh_key'] = pub
    conductor.cluster_update(context.ctx(), cluster, {'extra': extra})
    return priv, pub
def _create_cluster_mock(node_groups, aa):
    """Persist and return a minimal mock cluster record for the tests."""
    user_kp = mock.Mock()
    user_kp.public_key = "123"
    spec = {
        'name': 'test_cluster',
        'plugin_name': 'mock_plugin',
        'hadoop_version': 'mock_version',
        'default_image_id': 'initial',
        'user_keypair_id': 'user_keypair',
        'anti_affinity': aa,
        '_user_kp': user_kp,
        'private_key': c.generate_key_pair()[0],
        'node_groups': node_groups,
    }
    return conductor.cluster_create(context.ctx(), spec)
def _create_cluster_mock(node_groups, aa):
    """Build and persist a mock cluster used as a test fixture."""
    user_kp = mock.Mock()
    user_kp.public_key = "123"
    private_key = c.generate_key_pair()[0]
    cluster_values = dict(
        name="test_cluster",
        plugin_name="mock_plugin",
        hadoop_version="mock_version",
        default_image_id="initial",
        user_keypair_id="user_keypair",
        anti_affinity=aa,
        _user_kp=user_kp,
        private_key=private_key,
        node_groups=node_groups,
    )
    return conductor.cluster_create(context.ctx(), cluster_values)