def handle(self, request, context):
    """Create a Hadoop cluster from the wizard context.

    Returns True on success; False on a Sahara API error (with
    self.error_description set for the workflow to display). Any other
    exception is routed to Horizon's generic handler.
    """
    try:
        sahara = saharaclient.client(request)
        # TODO(nkonovalov): Implement AJAX Node Groups
        node_groups = None
        plugin, hadoop_version = whelpers.\
            get_plugin_and_hadoop_version(request)
        # Empty form values mean "not chosen" -> pass None to the API.
        cluster_template_id = context["general_cluster_template"] or None
        user_keypair = context["general_keypair"] or None
        sahara.clusters.create(
            context["general_cluster_name"],
            plugin, hadoop_version,
            cluster_template_id=cluster_template_id,
            default_image_id=context["general_image"],
            description=context["general_description"],
            node_groups=node_groups,
            user_keypair_id=user_keypair,
            net_id=context.get("general_neutron_management_network", None))
        return True
    except api_base.APIException as e:
        self.error_description = str(e)
        return False
    except Exception:
        exceptions.handle(request)
        # Previously fell through and returned None; return an explicit
        # bool so all exit paths are consistent.
        return False
def handle(self, request, context):
    """Launch cluster creation from the collected workflow context.

    :returns: True on success, False on failure (error_description is
              populated for Sahara API errors).
    """
    try:
        sahara = saharaclient.client(request)
        # TODO(nkonovalov) Implement AJAX Node Groups
        node_groups = None
        plugin, hadoop_version = whelpers.\
            get_plugin_and_hadoop_version(request)
        # Normalize empty selections to None for the client call.
        cluster_template_id = context["general_cluster_template"] or None
        user_keypair = context["general_keypair"] or None
        sahara.clusters.create(
            context["general_cluster_name"],
            plugin, hadoop_version,
            cluster_template_id=cluster_template_id,
            default_image_id=context["general_image"],
            description=context["general_description"],
            node_groups=node_groups,
            user_keypair_id=user_keypair,
            net_id=context.get("general_neutron_management_network", None))
        return True
    except api_base.APIException as e:
        self.error_description = str(e)
        return False
    except Exception:
        exceptions.handle(request)
        # Explicit False instead of an implicit None fall-through.
        return False
def __init__(self, request, *args, **kwargs):
    """Build the node-group configuration form for the chosen plugin."""
    super(ConfigureNodegroupsAction, self).__init__(
        request, *args, **kwargs)
    sahara = saharaclient.client(request)
    plugin, hadoop_version = whelpers.\
        get_plugin_and_hadoop_version(request)
    self.templates = sahara.node_group_templates.find(
        plugin_name=plugin, hadoop_version=hadoop_version)
    # Per-group "deletable" flags keyed by group name ("true"/"false").
    deletable = request.REQUEST.get("deletable", dict())
    if 'forms_ids' not in request.POST:
        return
    self.groups = []
    for form_id in json.loads(request.POST['forms_ids']):
        group_name = "group_name_%s" % form_id
        template_id = "template_id_%s" % form_id
        count = "count_%s" % form_id
        posted_name = request.POST[group_name]
        group = {
            "name": posted_name,
            "template_id": request.POST[template_id],
            "count": request.POST[count],
            "id": form_id,
            "deletable": deletable.get(posted_name, "true"),
        }
        self.groups.append(group)
        whelpers.build_node_group_fields(
            self, group_name, template_id, count)
def get_unique_binary_name(self, request, base_name):
    """Return base_name, suffixed with a UUID if that name is taken."""
    sahara = saharaclient.client(request)
    existing = [item.name for item in sahara.job_binary_internals.list()]
    if base_name not in existing:
        return base_name
    # Collision: append a uuid1 suffix to disambiguate.
    return "%s_%s" % (base_name, uuid.uuid1())
def __init__(self, request, *args, **kwargs):
    """Offer a plugin selector plus one version selector per plugin."""
    super(SelectPluginAction, self).__init__(request, *args, **kwargs)
    sahara = saharaclient.client(request)
    plugins = sahara.plugins.list()
    self.fields["plugin_name"] = forms.ChoiceField(
        label=_("Plugin name"),
        required=True,
        choices=[(p.name, p.title) for p in plugins],
        widget=forms.Select(attrs={"class": "plugin_name_choice"}))
    # One Hadoop-version field per plugin; CSS classes let the page
    # show only the field for the selected plugin.
    for p in plugins:
        field_name = "%s_version" % p.name
        css_classes = "plugin_version_choice %s_choice" % field_name
        self.fields[field_name] = forms.ChoiceField(
            label=_("Hadoop version"),
            required=True,
            choices=[(v, v) for v in p.versions],
            widget=forms.Select(attrs={"class": css_classes}))
def handle(self, request, context):
    """Create a cluster template from the wizard context.

    Node groups are reconstructed from the dynamically-numbered form
    fields (ng_group_name_N / ng_template_id_N / ng_count_N).

    :returns: True on success, False otherwise.
    """
    try:
        sahara = saharaclient.client(request)
        node_groups = []
        configs_dict = whelpers.parse_configs_from_context(
            context, self.defaults)
        ids = json.loads(context['ng_forms_ids'])
        for form_id in ids:
            name = context['ng_group_name_' + str(form_id)]
            template_id = context['ng_template_id_' + str(form_id)]
            count = context['ng_count_' + str(form_id)]
            node_groups.append({"name": name,
                                "node_group_template_id": template_id,
                                "count": count})
        plugin, hadoop_version = whelpers.\
            get_plugin_and_hadoop_version(request)
        # TODO(nkonovalov): Fix client to support default_image_id
        sahara.cluster_templates.create(
            context["general_cluster_template_name"],
            plugin,
            hadoop_version,
            context["general_description"],
            configs_dict,
            node_groups,
            context["anti_affinity_info"])
        return True
    except api_base.APIException as e:
        self.error_description = str(e)
        return False
    except Exception:
        exceptions.handle(request)
        # Explicit False instead of an implicit None fall-through.
        return False
def __init__(self, request, *args, **kwargs):
    """Populate dynamic node-group rows posted back by the client."""
    super(ConfigureNodegroupsAction, self).__init__(
        request, *args, **kwargs)
    sahara = saharaclient.client(request)
    plugin, hadoop_version = whelpers.\
        get_plugin_and_hadoop_version(request)
    self.templates = sahara.node_group_templates.find(
        plugin_name=plugin, hadoop_version=hadoop_version)
    deletable = request.REQUEST.get("deletable", dict())
    if 'forms_ids' in request.POST:
        self.groups = []
        for row_id in json.loads(request.POST['forms_ids']):
            suffix = str(row_id)
            group_name = "group_name_" + suffix
            template_id = "template_id_" + suffix
            count = "count_" + suffix
            posted_name = request.POST[group_name]
            self.groups.append({
                "name": posted_name,
                "template_id": request.POST[template_id],
                "count": request.POST[count],
                "id": row_id,
                # Default to deletable unless the caller said otherwise.
                "deletable": deletable.get(posted_name, "true"),
            })
            whelpers.build_node_group_fields(self, group_name,
                                             template_id, count)
def __init__(self, request, *args, **kwargs):
    """Add plugin/version selectors and a template-file upload field."""
    super(UploadFileForm, self).__init__(request, *args, **kwargs)
    client = saharaclient.client(request)
    self._generate_plugin_version_fields(client)
    self.fields['template_file'] = forms.FileField(
        label=_("Template"), required=True)
def populate_job_binary_internal_choices(self, request):
    """List internal job binaries, preceded by upload/create sentinels."""
    sahara = saharaclient.client(request)
    choices = [(binary.id, binary.name)
               for binary in sahara.job_binary_internals.list()]
    # Insert sentinels at the front; UPLOAD_BIN ends up first,
    # NEW_SCRIPT second.
    choices.insert(0, (self.NEW_SCRIPT, '*Create a script'))
    choices.insert(0, (self.UPLOAD_BIN, '*Upload a new file'))
    return choices
def __init__(self, request, context_seed, entry_point, *args, **kwargs):
    """Seed the scale-cluster workflow from an existing cluster.

    Injects the cluster's plugin/version into request.GET so downstream
    workflow steps can read them, marks every existing node group as
    non-deletable, and pre-populates the node-group form rows from the
    cluster's current groups.
    """
    ScaleCluster._cls_registry = set([])
    sahara = saharaclient.client(request)
    cluster_id = context_seed["cluster_id"]
    cluster = sahara.clusters.get(cluster_id)
    self.success_message = "Scaling Cluster %s successfully started" \
        % cluster.name
    plugin = cluster.plugin_name
    hadoop_version = cluster.hadoop_version
    # init deletable nodegroups: groups already on the cluster must not
    # be removable from the scaling form.
    deletable = dict()
    for group in cluster.node_groups:
        deletable[group["name"]] = "false"
    # Copy request.GET before mutating it (Django QueryDicts are
    # immutable by default), then inject the workflow parameters.
    request.GET = request.GET.copy()
    request.GET.update({"cluster_id": cluster_id})
    request.GET.update({"plugin_name": plugin})
    request.GET.update({"hadoop_version": hadoop_version})
    request.GET.update({"deletable": deletable})
    super(ScaleCluster, self).__init__(request, context_seed,
                                       entry_point, *args,
                                       **kwargs)
    # init Node Groups: pre-fill the ConfigureNodegroups step's rows.
    for step in self.steps:
        if isinstance(step, clt_create_flow.ConfigureNodegroups):
            ng_action = step.action
            template_ngs = cluster.node_groups
            # Only pre-fill on the initial GET; on POST the rows come
            # from the submitted forms_ids instead.
            if 'forms_ids' not in request.POST:
                ng_action.groups = []
                for id in range(0, len(template_ngs), 1):
                    group_name = "group_name_" + str(id)
                    template_id = "template_id_" + str(id)
                    count = "count_" + str(id)
                    templ_ng = template_ngs[id]
                    ng_action.groups.append(
                        {"name": templ_ng["name"],
                         "template_id":
                             templ_ng["node_group_template_id"],
                         "count": templ_ng["count"],
                         "id": id,
                         "deletable": "false"})
                    # Adds the matching form fields to the action.
                    whelpers.build_node_group_fields(ng_action,
                                                     group_name,
                                                     template_id,
                                                     count)
def populate_image_choices(self, request, context):
    """Offer only images carrying every tag the plugin version requires."""
    sahara = saharaclient.client(request)
    all_images = sahara.images.list()
    plugin, hadoop_version = whelpers.\
        get_plugin_and_hadoop_version(request)
    details = sahara.plugins.get_version_details(plugin, hadoop_version)
    required = set(details.required_image_tags)
    choices = []
    for image in all_images:
        if required.issubset(set(image.tags)):
            choices.append((image.id, image.name))
    return choices
def action(self, request, obj_id):
    """Delete a job binary and, for internal-db URLs, its backing blob."""
    sahara = saharaclient.client(request)
    job_binary = sahara.job_binaries.get(obj_id)
    jb_type, jb_internal_id = job_binary.url.split("://")
    if jb_type == "internal-db":
        try:
            sahara.job_binary_internals.delete(jb_internal_id)
        except api_base.APIException:
            # The backing internal object may already be gone; deleting
            # the binary record below is still correct.
            pass
    sahara.job_binaries.delete(obj_id)
def handle(self, request, context):
    """Scale a cluster: add new node groups and resize changed ones.

    Builds a scale_object with "add_node_groups" for names not yet on
    the cluster and "resize_node_groups" for groups whose count changed,
    then submits it. Returns True/False for the workflow.
    """
    sahara = saharaclient.client(request)
    cluster_id = request.GET["cluster_id"]
    cluster = sahara.clusters.get(cluster_id)
    existing_node_groups = set([])
    for ng in cluster.node_groups:
        existing_node_groups.add(ng["name"])
    scale_object = dict()
    ids = json.loads(context["ng_forms_ids"])
    for _id in ids:
        name = context["ng_group_name_%s" % _id]
        template_id = context["ng_template_id_%s" % _id]
        count = int(context["ng_count_%s" % _id])
        if name not in existing_node_groups:
            if "add_node_groups" not in scale_object:
                scale_object["add_node_groups"] = []
            scale_object["add_node_groups"].append(
                {"name": name,
                 "node_group_template_id": template_id,
                 "count": count})
        else:
            old_count = None
            for ng in cluster.node_groups:
                if name == ng["name"]:
                    old_count = ng["count"]
                    break
            # Normalize both sides to int before comparing: the form
            # posts counts as strings, so a raw str-vs-int comparison
            # would flag every unchanged group as resized.
            if int(old_count) != count:
                if "resize_node_groups" not in scale_object:
                    scale_object["resize_node_groups"] = []
                scale_object["resize_node_groups"].append(
                    {"name": name, "count": count})
    try:
        sahara.clusters.scale(cluster_id, scale_object)
        return True
    except api_base.APIException as e:
        self.error_description = str(e)
        return False
    except Exception:
        exceptions.handle(request)
        # Explicit False instead of an implicit None fall-through.
        return False
def __init__(self, request, *args, **kwargs):
    """Build the general node-group-template configuration form.

    Adds process choices for the selected plugin/version, an optional
    floating-IP-pool selector, hidden plugin/version fields, and one
    control per general node-group config parameter.
    """
    super(GeneralConfigAction, self).__init__(request, *args, **kwargs)
    sahara = saharaclient.client(request)
    hlps = helpers.Helpers(sahara)
    plugin, hadoop_version = whelpers.\
        get_plugin_and_hadoop_version(request)
    process_choices = []
    version_details = sahara.plugins.get_version_details(plugin,
                                                         hadoop_version)
    # Choice values encode "service:process"; labels show the process.
    for service, processes in version_details.node_processes.items():
        for process in processes:
            process_choices.append(
                (str(service) + ":" + str(process), process))
    # Floating-IP pool selector is only relevant when auto-assignment
    # is disabled in the deployment.
    if not saharaclient.AUTO_ASSIGNMENT_ENABLED:
        pools = network.floating_ip_pools_list(request)
        pool_choices = [(pool.id, pool.name) for pool in pools]
        pool_choices.insert(0, (None, "Do not assign floating IPs"))
        self.fields['floating_ip_pool'] = forms.ChoiceField(
            label=_("Floating IP pool"),
            choices=pool_choices,
            required=False)
    self.fields["processes"] = forms.MultipleChoiceField(
        label=_("Processes"),
        required=True,
        widget=forms.CheckboxSelectMultiple(),
        help_text=_("Processes to be launched in node group"),
        choices=process_choices)
    # Hidden fields carry the wizard's plugin selection through POSTs.
    self.fields["plugin_name"] = forms.CharField(
        widget=forms.HiddenInput(),
        initial=plugin
    )
    self.fields["hadoop_version"] = forms.CharField(
        widget=forms.HiddenInput(),
        initial=hadoop_version
    )
    # One form control per plugin-defined general config parameter.
    node_parameters = hlps.get_general_node_group_configs(plugin,
                                                          hadoop_version)
    for param in node_parameters:
        self.fields[param.name] = whelpers.build_control(param)
def __init__(self, request, context_seed, entry_point, *args, **kwargs):
    """Populate config tabs with the plugin's node-group parameters."""
    sahara = saharaclient.client(request)
    hlps = helpers.Helpers(sahara)
    plugin, hadoop_version = whelpers.\
        get_plugin_and_hadoop_version(request)
    general_params = hlps.get_general_node_group_configs(plugin,
                                                         hadoop_version)
    service_params = hlps.get_targeted_node_group_configs(plugin,
                                                          hadoop_version)
    # Tabs must exist before the base workflow wires up its steps.
    self._populate_tabs(general_params, service_params)
    super(ConfigureNodegroupTemplate, self).__init__(
        request, context_seed, entry_point, *args, **kwargs)
def handle_internal(self, request, context):
    """Resolve the chosen internal binary to an internal-db:// URL.

    For the upload/new-script sentinel choices a new job-binary-internal
    is created first and its id is used; for an existing binary the
    posted id is passed through unchanged.
    """
    sahara = saharaclient.client(request)
    bin_id = context["job_binary_internal"]
    if bin_id == self.UPLOAD_BIN:
        result = sahara.job_binary_internals.create(
            self.get_unique_binary_name(
                request, request.FILES["job_binary_file"].name),
            request.FILES["job_binary_file"].read())
        bin_id = result.id
    elif bin_id == self.NEW_SCRIPT:
        result = sahara.job_binary_internals.create(
            self.get_unique_binary_name(
                request, context["job_binary_script_name"]),
            context["job_binary_script"])
        bin_id = result.id
    # Assign bin_id from the created binary inside each branch: the old
    # unconditional `bin_id = result.id` evaluated ''.id (AttributeError)
    # when an existing internal binary was selected.
    return "internal-db://%s" % bin_id
def _get_available_images(self, request):
    """Images visible to the user that are not yet registered in Sahara."""
    candidates = self._get_tenant_images(request)
    if request.user.is_superuser:
        candidates += self._get_public_images(request)
    sahara = saharaclient.client(request)
    # Start from ids already registered; grow the list as we keep
    # images, so repeats in `candidates` are dropped too.
    seen_ids = [img.id for img in sahara.images.list()]
    fresh = []
    for image in candidates:
        if image.id in seen_ids:
            continue
        seen_ids.append(image.id)
        fresh.append(image)
    return [image for image in fresh
            if image.container_format not in ('aki', 'ari')]
def handle_internal(self, request, context):
    """Return the internal-db:// URL for the selected internal binary.

    Creates a new job-binary-internal when the upload or new-script
    sentinel was chosen; otherwise the posted value is already the id
    of an existing binary and is used directly.
    """
    sahara = saharaclient.client(request)
    bin_id = context["job_binary_internal"]
    if bin_id == self.UPLOAD_BIN:
        result = sahara.job_binary_internals.create(
            self.get_unique_binary_name(
                request, request.FILES["job_binary_file"].name),
            request.FILES["job_binary_file"].read())
        bin_id = result.id
    elif bin_id == self.NEW_SCRIPT:
        result = sahara.job_binary_internals.create(
            self.get_unique_binary_name(request,
                                        context["job_binary_script_name"]),
            context["job_binary_script"])
        bin_id = result.id
    # Taking result.id inside each branch avoids touching the '' result
    # placeholder when an existing binary was selected.
    return "internal-db://%s" % bin_id
def _get_available_images(self, request):
    """Return tenant (and, for admins, public) images not yet registered."""
    images = self._get_tenant_images(request)
    if request.user.is_superuser:
        images += self._get_public_images(request)
    sahara = saharaclient.client(request)
    known = [img.id for img in sahara.images.list()]
    unregistered = []
    for image in images:
        if image.id not in known:
            # Record the id so duplicates in `images` are skipped too.
            known.append(image.id)
            unregistered.append(image)
    # aki/ari are kernel/ramdisk formats, not bootable instance images.
    return [img for img in unregistered
            if img.container_format not in ('aki', 'ari')]
def __init__(self, request, context_seed, entry_point, *args, **kwargs):
    """Set up cluster-template config tabs for the selected plugin."""
    ConfigureClusterTemplate._cls_registry = set([])
    sahara = saharaclient.client(request)
    hlps = helpers.Helpers(sahara)
    plugin, hadoop_version = whelpers.\
        get_plugin_and_hadoop_version(request)
    # Tabs must be in place before the base workflow initializes.
    self._populate_tabs(
        hlps.get_cluster_general_configs(plugin, hadoop_version),
        hlps.get_targeted_cluster_configs(plugin, hadoop_version))
    super(ConfigureClusterTemplate, self).__init__(request,
                                                   context_seed,
                                                   entry_point,
                                                   *args, **kwargs)
def __init__(self, request, *args, **kwargs):
    """Assemble the general node-group configuration form fields."""
    super(GeneralConfigAction, self).__init__(request, *args, **kwargs)
    sahara = saharaclient.client(request)
    hlps = helpers.Helpers(sahara)
    plugin, hadoop_version = whelpers.\
        get_plugin_and_hadoop_version(request)
    version_details = sahara.plugins.get_version_details(
        plugin, hadoop_version)
    # Each choice value is encoded as "service:process".
    process_choices = [
        (str(service) + ":" + str(process), process)
        for service, processes in version_details.node_processes.items()
        for process in processes]
    if not saharaclient.AUTO_ASSIGNMENT_ENABLED:
        pool_choices = [(pool.id, pool.name)
                        for pool in network.floating_ip_pools_list(request)]
        pool_choices.insert(0, (None, "Do not assign floating IPs"))
        self.fields['floating_ip_pool'] = forms.ChoiceField(
            label=_("Floating IP pool"),
            choices=pool_choices,
            required=False)
    self.fields["processes"] = forms.MultipleChoiceField(
        label=_("Processes"),
        required=True,
        widget=forms.CheckboxSelectMultiple(),
        help_text=_("Processes to be launched in node group"),
        choices=process_choices)
    # Hidden fields keep the plugin selection across POSTs.
    self.fields["plugin_name"] = forms.CharField(
        widget=forms.HiddenInput(), initial=plugin)
    self.fields["hadoop_version"] = forms.CharField(
        widget=forms.HiddenInput(), initial=hadoop_version)
    for param in hlps.get_general_node_group_configs(plugin,
                                                     hadoop_version):
        self.fields[param.name] = whelpers.build_control(param)
def handle(self, request, data):
    """Update a registered image's username, description and tags.

    :returns: True on success, False on failure (a user-visible error
              message is emitted on API errors).
    """
    try:
        sahara = saharaclient.client(request)
        image_id = data['image_id']
        user_name = data['user_name']
        desc = data['description']
        sahara.images.update_image(image_id, user_name, desc)
        image_tags = json.loads(data["tags_list"])
        sahara.images.update_tags(image_id, image_tags)
        messages.success(request, self.message)
        return True
    except api_base.APIException as e:
        messages.error(request, str(e))
        return False
    except Exception:
        exceptions.handle(request)
        # Explicit False instead of an implicit None fall-through.
        return False
def handle(self, request, data):
    """Convert an uploaded template file into a cluster template.

    :returns: True on success, False on failure.
    """
    try:
        # NOTE: no size limit is enforced on the uploaded file.
        filecontent = self.files['template_file'].read()
        plugin_name = data['plugin_name']
        hadoop_version = data.get(plugin_name + "_version")
        sahara = saharaclient.client(request)
        sahara.plugins.convert_to_cluster_template(plugin_name,
                                                   hadoop_version,
                                                   data['template_name'],
                                                   filecontent)
        return True
    except api_base.APIException as e:
        messages.error(request, str(e))
        return False
    except Exception as e:
        messages.error(request, str(e))
        # Was `return True`, which dismissed the form as if conversion
        # succeeded; report failure consistently with the API-error path.
        return False
def populate_cluster_template_choices(self, request, context):
    """Templates matching the chosen plugin/version; preselect if given."""
    sahara = saharaclient.client(request)
    templates = sahara.cluster_templates.list()
    plugin, hadoop_version = whelpers.\
        get_plugin_and_hadoop_version(request)
    choices = []
    for template in templates:
        if (template.hadoop_version == hadoop_version
                and template.plugin_name == plugin):
            choices.append((template.id, template.name))
    # cluster_template_id comes from cluster templates table, when
    # Create Cluster from template is clicked there
    selected_template_id = request.REQUEST.get("cluster_template_id",
                                               None)
    for template in templates:
        if template.id == selected_template_id:
            self.fields['cluster_template'].initial = template.id
    return choices
def handle(self, request, context):
    """Create a job binary, resolving internal-db/swift storage first."""
    try:
        sahara = saharaclient.client(request)
        extra = {}
        jb_type = context["job_binary_type"]
        bin_url = "%s://%s" % (jb_type, context["job_binary_url"])
        if jb_type == "internal-db":
            # Upload/create the blob and use the URL it yields.
            bin_url = self.handle_internal(request, context)
        elif jb_type == "swift":
            extra = self.handle_swift(request, context)
        sahara.job_binaries.create(context["job_binary_name"],
                                   bin_url,
                                   context["job_binary_description"],
                                   extra)
        messages.success(request, "Successfully created job binary")
        return True
    except api_base.APIException as e:
        messages.error(request, str(e))
        return False
    except Exception as e:
        messages.error(request, str(e))
        return False
def handle(self, request, context):
    """Create a node group template from the wizard context.

    :returns: True on success, False on failure (error_description is
              set for Sahara API errors).
    """
    try:
        sahara = saharaclient.client(request)
        # Processes arrive encoded as "service:process"; keep only the
        # process part for the API call.
        processes = []
        for service_process in context["general_processes"]:
            processes.append(str(service_process).split(":")[1])
        configs_dict = whelpers.parse_configs_from_context(context,
                                                           self.defaults)
        plugin, hadoop_version = whelpers.\
            get_plugin_and_hadoop_version(request)
        volumes_per_node = None
        volumes_size = None
        if context["general_storage"] == "cinder_volume":
            volumes_per_node = context["general_volumes_per_node"]
            volumes_size = context["general_volumes_size"]
        sahara.node_group_templates.create(
            name=context["general_nodegroup_name"],
            plugin_name=plugin,
            hadoop_version=hadoop_version,
            description=context["general_description"],
            flavor_id=context["general_flavor"],
            volumes_per_node=volumes_per_node,
            volumes_size=volumes_size,
            node_processes=processes,
            node_configs=configs_dict,
            floating_ip_pool=context.get("general_floating_ip_pool",
                                         None))
        return True
    except api_base.APIException as e:
        self.error_description = str(e)
        return False
    except Exception:
        exceptions.handle(request)
        # Explicit False instead of an implicit None fall-through.
        return False
def handle(self, request, context):
    """Submit a new node group template built from the form context.

    :returns: True on success, False on failure.
    """
    try:
        sahara = saharaclient.client(request)
        # Strip the "service:" prefix from each selected process.
        processes = []
        for service_process in context["general_processes"]:
            processes.append(str(service_process).split(":")[1])
        configs_dict = whelpers.parse_configs_from_context(
            context, self.defaults)
        plugin, hadoop_version = whelpers.\
            get_plugin_and_hadoop_version(request)
        volumes_per_node = None
        volumes_size = None
        if context["general_storage"] == "cinder_volume":
            volumes_per_node = context["general_volumes_per_node"]
            volumes_size = context["general_volumes_size"]
        sahara.node_group_templates.create(
            name=context["general_nodegroup_name"],
            plugin_name=plugin,
            hadoop_version=hadoop_version,
            description=context["general_description"],
            flavor_id=context["general_flavor"],
            volumes_per_node=volumes_per_node,
            volumes_size=volumes_size,
            node_processes=processes,
            node_configs=configs_dict,
            floating_ip_pool=context.get("general_floating_ip_pool",
                                         None))
        return True
    except api_base.APIException as e:
        self.error_description = str(e)
        return False
    except Exception:
        exceptions.handle(request)
        # Explicit False instead of an implicit None fall-through.
        return False
def handle(self, request, context):
    """Create the job binary described by the form context."""
    try:
        sahara = saharaclient.client(request)
        extra = {}
        bin_url = "%s://%s" % (context["job_binary_type"],
                               context["job_binary_url"])
        if context["job_binary_type"] == "internal-db":
            bin_url = self.handle_internal(request, context)
        elif context["job_binary_type"] == "swift":
            extra = self.handle_swift(request, context)
        sahara.job_binaries.create(
            context["job_binary_name"], bin_url,
            context["job_binary_description"], extra)
        messages.success(request, "Successfully created job binary")
        return True
    except api_base.APIException as e:
        messages.error(request, str(e))
        return False
    except Exception as e:
        messages.error(request, str(e))
        return False
def __init__(self, request, *args, **kwargs):
    """Let the user pick a plugin and a Hadoop version for it."""
    super(SelectPluginAction, self).__init__(request, *args, **kwargs)
    sahara = saharaclient.client(request)
    plugins = sahara.plugins.list()
    plugin_choices = []
    for plugin in plugins:
        plugin_choices.append((plugin.name, plugin.title))
    self.fields["plugin_name"] = forms.ChoiceField(
        label=_("Plugin name"),
        required=True,
        choices=plugin_choices,
        widget=forms.Select(attrs={"class": "plugin_name_choice"}))
    # One version selector per plugin, tagged with a per-plugin CSS
    # class.
    for plugin in plugins:
        field_name = plugin.name + "_version"
        version_choices = [(version, version)
                           for version in plugin.versions]
        widget = forms.Select(attrs={
            "class": "plugin_version_choice " + field_name + "_choice"})
        self.fields[field_name] = forms.ChoiceField(
            label=_("Hadoop version"),
            required=True,
            choices=version_choices,
            widget=widget)
def handle(self, request, context):
    """Create a cluster template from the collected workflow context.

    Node groups are rebuilt from the dynamically-numbered fields
    (ng_group_name_N / ng_template_id_N / ng_count_N).

    :returns: True on success, False on failure.
    """
    try:
        sahara = saharaclient.client(request)
        node_groups = []
        configs_dict = whelpers.parse_configs_from_context(context,
                                                           self.defaults)
        ids = json.loads(context['ng_forms_ids'])
        for form_id in ids:
            name = context['ng_group_name_' + str(form_id)]
            template_id = context['ng_template_id_' + str(form_id)]
            count = context['ng_count_' + str(form_id)]
            ng = {"name": name,
                  "node_group_template_id": template_id,
                  "count": count}
            node_groups.append(ng)
        plugin, hadoop_version = whelpers.\
            get_plugin_and_hadoop_version(request)
        # TODO(nkonovalov): Fix client to support default_image_id
        sahara.cluster_templates.create(
            context["general_cluster_template_name"],
            plugin,
            hadoop_version,
            context["general_description"],
            configs_dict,
            node_groups,
            context["anti_affinity_info"])
        return True
    except api_base.APIException as e:
        self.error_description = str(e)
        return False
    except Exception:
        exceptions.handle(request)
        # Explicit False instead of an implicit None fall-through.
        return False
def __init__(self, request, *args, **kwargs):
    """Generate plugin/version selector fields from the Sahara catalog."""
    super(SelectPluginAction, self).__init__(request, *args, **kwargs)
    client = saharaclient.client(request)
    self._generate_plugin_version_fields(client)