def _check_hdfs_data_source_create(data):
    if len(data['url']) == 0:
        raise ex.InvalidException("HDFS url must not be empty")
    url = urlparse.urlparse(data['url'])
    if url.scheme != "hdfs":
        raise ex.InvalidException("URL scheme must be 'hdfs'")
    if not url.hostname:
        raise ex.InvalidException("HDFS url is incorrect, "
                                  "cannot determine a hostname")

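# A minimal usage sketch for the HDFS URL validation above. The helper name
# and the sample URLs are made up for illustration; only the shape of the
# checks is taken from the function itself (Python 2 urlparse, as used above).
import urlparse


def _looks_like_valid_hdfs_url(candidate):
    url = urlparse.urlparse(candidate)
    return bool(candidate) and url.scheme == "hdfs" and bool(url.hostname)

# "hdfs://namenode:8020/user/data" passes all three checks;
# "swift://container/object" fails the scheme check;
# "hdfs:///no-host" fails the hostname check.
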
def check_node_processes(plugin_name, version, node_processes):
    if len(set(node_processes)) != len(node_processes):
        raise ex.InvalidException("Duplicates in node processes "
                                  "have been detected")
    plugin_processes = []
    for process in plugin_base.PLUGINS.get_plugin(
            plugin_name).get_node_processes(version).values():
        plugin_processes += process

    if not set(node_processes).issubset(set(plugin_processes)):
        raise ex.InvalidException("Plugin supports the following "
                                  "node processes: %s" % plugin_processes)

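# Sketch of the subset check performed above, with hypothetical values: the
# requested processes must all be provided by the plugin version ('namenode',
# 'datanode', 'jobtracker', 'tasktracker' are illustrative names only).
plugin_processes = ['namenode', 'datanode', 'jobtracker', 'tasktracker']
requested = ['namenode', 'tasktracker']
assert set(requested).issubset(set(plugin_processes))       # passes
assert not set(['oozie']).issubset(set(plugin_processes))   # would raise above
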
def check_node_group_configs(plugin_name, hadoop_version, ng_configs,
                             plugin_configs=None):
    # TODO(aignatov): Should have scope and config type validations
    pl_confs = plugin_configs or _get_plugin_configs(plugin_name,
                                                     hadoop_version)
    for app_target, configs in ng_configs.items():
        if app_target not in pl_confs:
            raise ex.InvalidException("Plugin doesn't contain applicable "
                                      "target '%s'" % app_target)
        for name, values in configs.items():
            if name not in pl_confs[app_target]:
                raise ex.InvalidException("Plugin's applicable target '%s' "
                                          "doesn't contain config with name "
                                          "'%s'" % (app_target, name))

def check_resize(cluster, r_node_groups):
    cluster_ng_names = [ng.name for ng in cluster.node_groups]

    check_duplicates_node_groups_names(r_node_groups)

    for ng in r_node_groups:
        if ng['name'] not in cluster_ng_names:
            raise ex.InvalidException("Cluster doesn't contain node group "
                                      "with name '%s'" % ng['name'])

def check_required_image_tags(plugin_name, hadoop_version, image_id):
    image = api.get_image(id=image_id)
    plugin = plugin_base.PLUGINS.get_plugin(plugin_name)
    req_tags = set(plugin.get_required_image_tags(hadoop_version))
    if not req_tags.issubset(set(image.tags)):
        raise ex.InvalidException("Tags of requested image '%s' don't "
                                  "contain required tags "
                                  "['%s', '%s']" % (image_id, plugin_name,
                                                    hadoop_version))

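# Sketch of the tag check above with hypothetical values: the registered image
# must carry both the plugin name and the Hadoop version as tags ('vanilla',
# '1.2.1' and 'ubuntu' are illustrative, not required names).
req_tags = set(['vanilla', '1.2.1'])
image_tags = set(['vanilla', '1.2.1', 'ubuntu'])
assert req_tags.issubset(image_tags)            # validation passes
assert not req_tags.issubset(set(['ubuntu']))   # would raise InvalidException
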
def check_cluster_template_usage(cluster_template_id, **kwargs):
    clusters = api.get_clusters()
    used_cluster_template_ids = [cluster.cluster_template_id
                                 for cluster in clusters]
    if cluster_template_id in used_cluster_template_ids:
        raise ex.InvalidException("Cluster template %s is in use"
                                  % cluster_template_id)

def check_cluster_scaling(data, cluster_id, **kwargs):
    cluster = api.get_cluster(id=cluster_id)

    if not (plugin_base.PLUGINS.is_plugin_implements(cluster.plugin_name,
                                                     'scale_cluster') and
            plugin_base.PLUGINS.is_plugin_implements(cluster.plugin_name,
                                                     'decommission_nodes')):
        raise ex.InvalidException(
            "Requested plugin '%s' doesn't support cluster scaling feature"
            % cluster.plugin_name)

    if cluster.status != 'Active':
        raise ex.InvalidException("Cluster cannot be scaled while not in "
                                  "'Active' status. Cluster status: "
                                  + cluster.status)

    if data.get("resize_node_groups"):
        b.check_resize(cluster, data['resize_node_groups'])

    if data.get("add_node_groups"):
        b.check_add_node_groups(cluster, data['add_node_groups'])

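# A hypothetical scaling request body, shaped to match the two keys that
# check_cluster_scaling inspects ('resize_node_groups' and 'add_node_groups').
# The fields inside each node group entry are illustrative assumptions, not
# taken from the validator above.
scaling_data = {
    "resize_node_groups": [{"name": "worker", "count": 4}],
    "add_node_groups": [{"name": "new-worker", "count": 2}],
}
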
def check_node_group_template_usage(node_group_template_id, **kwargs):
    node_groups = []

    for cluster in api.get_clusters():
        node_groups += cluster.node_groups

    for cluster_template in api.get_cluster_templates():
        node_groups += cluster_template.node_groups

    node_group_template_ids = set(
        [node_group.node_group_template_id for node_group in node_groups])

    if node_group_template_id in node_group_template_ids:
        raise ex.InvalidException("Node group template %s is in use"
                                  % node_group_template_id)

def check_add_node_groups(cluster, add_node_groups):
    cluster_ng_names = [ng.name for ng in cluster.node_groups]

    check_duplicates_node_groups_names(add_node_groups)

    pl_confs = _get_plugin_configs(cluster.plugin_name, cluster.hadoop_version)

    for ng in add_node_groups:
        if ng['name'] in cluster_ng_names:
            raise ex.InvalidException("Can't add new node group. Cluster "
                                      "already has a node group with name "
                                      "'%s'" % ng['name'])

        check_node_group_basic_fields(cluster.plugin_name,
                                      cluster.hadoop_version, ng, pl_confs)

def check_image_registered(image_id):
    if image_id not in [i.id for i in nova.client().images.list_registered()]:
        raise ex.InvalidException("Requested image '%s' is not registered"
                                  % image_id)

def check_plugin_supports_version(p_name, version):
    if version not in plugin_base.PLUGINS.get_plugin(p_name).get_versions():
        raise ex.InvalidException("Requested plugin '%s' doesn't support"
                                  " version '%s'" % (p_name, version))

def check_plugin_name_exists(name):
    if name not in [p.name for p in api.get_plugins()]:
        raise ex.InvalidException("Savanna doesn't contain plugin with name "
                                  "'%s'" % name)

def check_flavor_exists(flavor_id):
    flavor_list = nova.client().flavors.list()
    if flavor_id not in [flavor.id for flavor in flavor_list]:
        raise ex.InvalidException("Requested flavor '%s' not found"
                                  % flavor_id)

def check_cinder_exists():
    services = [service.name for service in keystone.client().services.list()]
    if 'cinder' not in services:
        raise ex.InvalidException("Cinder is not supported")

def check_floatingip_pool_exists(ng_name, pool_id):
    if not nova.get_network(id=pool_id):
        raise ex.InvalidException("Floating IP pool %s for node group "
                                  "'%s' not found" % (pool_id, ng_name))

def check_cluster_template_exists(cluster_template_id):
    if not api.get_cluster_template(id=cluster_template_id):
        raise ex.InvalidException("Cluster template with id '%s'"
                                  " doesn't exist" % cluster_template_id)

def check_node_group_template_exists(ng_tmpl_id):
    if not api.get_node_group_template(id=ng_tmpl_id):
        raise ex.InvalidException("NodeGroup template with id '%s'"
                                  " doesn't exist" % ng_tmpl_id)

def decommission_nodes(self, cluster, instances):
    raise exc.InvalidException('The HDP plugin does not yet support the '
                               'decommissioning of nodes')

def check_keypair_exists(keypair):
    try:
        nova.client().keypairs.get(keypair)
    except nova_ex.NotFound:
        raise ex.InvalidException("Requested keypair '%s' not found"
                                  % keypair)

def check_cluster_exists(id):
    if not api.get_cluster(id):
        raise ex.InvalidException("Cluster with id '%s'"
                                  " doesn't exist" % id)

def check_flavor_exists(flavor_id):
    try:
        nova.client().flavors.get(flavor_id)
    except nova_ex.NotFound:
        raise ex.InvalidException("Requested flavor '%s' not found"
                                  % flavor_id)

def check_network_exists(net_id):
    if not nova.get_network(id=net_id):
        raise ex.InvalidException("Network %s not found" % net_id)

def check_job_binary_internal_exists(jbi_id):
    if not api.get_job_binary_internal(jbi_id):
        raise ex.InvalidException("JobBinaryInternal with id '%s'"
                                  " doesn't exist" % jbi_id)

def check_data_source_exists(data_source_id):
    if not api.get_data_source(data_source_id):
        raise ex.InvalidException("DataSource with id '%s'"
                                  " doesn't exist" % data_source_id)

def check_duplicates_node_groups_names(node_groups):
    ng_names = [ng['name'] for ng in node_groups]
    if len(set(ng_names)) < len(node_groups):
        raise ex.InvalidException("Duplicates in node group names "
                                  "have been detected")

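# A minimal sketch of the duplicate-name detection above, using plain dicts
# in place of real node group objects (the names are made up):
node_groups = [{'name': 'master'}, {'name': 'worker'}, {'name': 'worker'}]
ng_names = [ng['name'] for ng in node_groups]
assert len(set(ng_names)) < len(node_groups)  # duplicates present -> would raise
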
def check_convert_to_template(plugin_name, version, **kwargs):
    if not plugin_base.PLUGINS.is_plugin_implements(plugin_name, 'convert'):
        raise ex.InvalidException(
            "Requested plugin '%s' doesn't support converting config files "
            "to cluster templates" % plugin_name)