def __init__(self, request, *args, **kwargs):
    """Build the node-process checkbox field from plugin metadata.

    Fetches the plugin/version details for the current request and
    offers every ``service:process`` pair as a selectable choice.
    """
    super(SelectNodeProcessesAction, self).__init__(
        request, *args, **kwargs)
    plugin, hadoop_version = (
        workflow_helpers.get_plugin_and_hadoop_version(request))
    node_processes = {}
    try:
        version_details = saharaclient.plugin_get_version_details(
            request, plugin, hadoop_version)
        node_processes = version_details.node_processes
    except Exception:
        exceptions.handle(request,
                          _("Unable to generate process choices."))
    # Label each process with its owning service, e.g. "HDFS:datanode".
    process_choices = [
        ("%s:%s" % (service, process), process)
        for service, processes in node_processes.items()
        for process in processes]
    self.fields["processes"] = forms.MultipleChoiceField(
        label=_("Select Node Group Processes"),
        widget=CheckboxSelectMultiple(),
        choices=process_choices,
        required=True)
def __init__(self, request, *args, **kwargs):
    """Build the general node-group-template configuration form.

    Populates plugin process choices, an optional floating-IP pool
    selector, security-group fields, hidden plugin identity fields and
    the plugin-specific "general" node parameters.
    """
    super(GeneralConfigAction, self).__init__(request, *args, **kwargs)
    hlps = helpers.Helpers(request)
    plugin, hadoop_version = (
        workflow_helpers.get_plugin_and_hadoop_version(request))
    process_choices = []
    try:
        version_details = saharaclient.plugin_get_version_details(
            request, plugin, hadoop_version)
        # Choice value keeps the owning service, e.g. "HDFS:datanode".
        for service, processes in version_details.node_processes.items():
            for process in processes:
                process_choices.append(
                    (str(service) + ":" + str(process), process))
    except Exception:
        exceptions.handle(request,
                          _("Unable to generate process choices."))
    if not saharaclient.SAHARA_AUTO_IP_ALLOCATION_ENABLED:
        pools = network.floating_ip_pools_list(request)
        pool_choices = [(pool.id, pool.name) for pool in pools]
        # FIX: the placeholder is user-visible, so mark it for
        # translation like every other label in this form.
        pool_choices.insert(0, (None, _("Do not assign floating IPs")))
        self.fields['floating_ip_pool'] = forms.ChoiceField(
            label=_("Floating IP Pool"),
            choices=pool_choices,
            required=False)
    self.fields["autogroup"] = forms.BooleanField(
        label=_("Auto Security Group"),
        widget=forms.CheckboxInput(),
        help_text=_("Create security group for this Node Group."),
        required=False)
    groups = network.security_group_list(request)
    security_group_list = [(sg.id, sg.name) for sg in groups]
    self.fields["groups"] = forms.MultipleChoiceField(
        label=_("Security Groups"),
        widget=forms.CheckboxSelectMultiple(),
        help_text=_("Launch instances in these security groups."),
        choices=security_group_list,
        required=False)
    self.fields["processes"] = forms.MultipleChoiceField(
        label=_("Processes"),
        widget=forms.CheckboxSelectMultiple(),
        help_text=_("Processes to be launched in node group"),
        choices=process_choices)
    # Hidden fields carry the plugin identity through the workflow.
    self.fields["plugin_name"] = forms.CharField(
        widget=forms.HiddenInput(),
        initial=plugin)
    self.fields["hadoop_version"] = forms.CharField(
        widget=forms.HiddenInput(),
        initial=hadoop_version)
    node_parameters = hlps.get_general_node_group_configs(
        plugin, hadoop_version)
    for param in node_parameters:
        self.fields[param.name] = workflow_helpers.build_control(param)
def get_targeted_cluster_configs(self, plugin_name, hadoop_version):
    """Map each plugin service to its cluster-scoped config parameters."""
    details = saharaclient.plugin_get_version_details(
        self.request, plugin_name, hadoop_version)
    return {
        service: self._extract_parameters(
            details.configs, "cluster", service)
        for service in details.node_processes
    }
def __init__(self, request, *args, **kwargs):
    """Build the general node-group-template configuration form.

    Populates plugin process choices, an optional floating-IP pool
    selector, the proxy-gateway flag, hidden plugin identity fields,
    plugin-specific "general" node parameters and, when launched from
    a guide, the guide template type.
    """
    super(GeneralConfigAction, self).__init__(request, *args, **kwargs)
    hlps = helpers.Helpers(request)
    plugin, hadoop_version = (
        workflow_helpers.get_plugin_and_hadoop_version(request))
    process_choices = []
    try:
        version_details = saharaclient.plugin_get_version_details(
            request, plugin, hadoop_version)
        # Choice value keeps the owning service, e.g. "HDFS:datanode".
        for service, processes in version_details.node_processes.items():
            for process in processes:
                process_choices.append(
                    (str(service) + ":" + str(process), process))
    except Exception:
        exceptions.handle(request,
                          _("Unable to generate process choices."))
    if not saharaclient.SAHARA_AUTO_IP_ALLOCATION_ENABLED:
        pools = network.floating_ip_pools_list(request)
        pool_choices = [(pool.id, pool.name) for pool in pools]
        # FIX: the placeholder is user-visible, so mark it for
        # translation like every other label in this form.
        pool_choices.insert(0, (None, _("Do not assign floating IPs")))
        self.fields['floating_ip_pool'] = forms.ChoiceField(
            label=_("Floating IP Pool"),
            choices=pool_choices,
            required=False)
    self.fields["proxygateway"] = forms.BooleanField(
        label=_("Proxy Gateway"),
        widget=forms.CheckboxInput(),
        help_text=_("Sahara will use instances of this node group to "
                    "access other cluster instances."),
        required=False)
    self.fields["processes"] = forms.MultipleChoiceField(
        label=_("Processes"),
        widget=forms.CheckboxSelectMultiple(),
        help_text=_("Processes to be launched in node group"),
        choices=process_choices)
    # Hidden fields carry the plugin identity through the workflow.
    self.fields["plugin_name"] = forms.CharField(
        widget=forms.HiddenInput(),
        initial=plugin)
    self.fields["hadoop_version"] = forms.CharField(
        widget=forms.HiddenInput(),
        initial=hadoop_version)
    node_parameters = hlps.get_general_node_group_configs(
        plugin, hadoop_version)
    for param in node_parameters:
        self.fields[param.name] = workflow_helpers.build_control(param)
    if "guide_template_type" in request.resolver_match.kwargs:
        self.fields["guide_template_type"] = forms.CharField(
            required=False,
            widget=forms.HiddenInput(),
            initial=request.resolver_match.kwargs["guide_template_type"])
def __init__(self, request, context_seed, entry_point, *args, **kwargs):
    """Initialize the copy workflow pre-filled from an existing template.

    Loads the source node group template, copies its node configs, and
    seeds the GeneralConfig step's form fields with the template's
    values so the user starts from a "-copy" of the original.
    """
    template_id = context_seed["template_id"]
    template = saharaclient.nodegroup_template_get(request, template_id)
    self._set_configs_to_copy(template.node_configs)
    plugin = template.plugin_name
    hadoop_version = template.hadoop_version
    # The parent workflow reads plugin identity from GET parameters,
    # so inject them before calling super(). GET is copied first
    # because Django QueryDicts on a request are immutable.
    request.GET = request.GET.copy()
    request.GET.update(
        {"plugin_name": plugin, "hadoop_version": hadoop_version})
    super(CopyNodegroupTemplate, self).__init__(request, context_seed,
                                                entry_point, *args,
                                                **kwargs)
    for step in self.steps:
        # Only the GeneralConfig step is pre-populated here.
        if not isinstance(step, create_flow.GeneralConfig):
            continue
        fields = step.action.fields
        fields["nodegroup_name"].initial = template.name + "-copy"
        fields["description"].initial = template.description
        fields["flavor"].initial = template.flavor_id
        fields["availability_zone"].initial = template.availability_zone
        # volumes_per_node > 0 implies cinder-backed storage.
        storage = "cinder_volume" if template.volumes_per_node > 0 \
            else "ephemeral_drive"
        volumes_per_node = template.volumes_per_node
        volumes_size = template.volumes_size
        fields["storage"].initial = storage
        fields["volumes_per_node"].initial = volumes_per_node
        fields["volumes_size"].initial = volumes_size
        if template.floating_ip_pool:
            fields['floating_ip_pool'].initial = template.floating_ip_pool
        processes_dict = dict()
        try:
            plugin_details = saharaclient.plugin_get_version_details(
                request, plugin, hadoop_version)
            plugin_node_processes = plugin_details.node_processes
        except Exception:
            # Best effort: without plugin details the service part of
            # the "service:process" keys below will be None.
            plugin_node_processes = dict()
            exceptions.handle(request,
                              _("Unable to fetch plugin details."))
        for process in template.node_processes:
            # need to know the service
            _service = None
            for service, processes in plugin_node_processes.items():
                if process in processes:
                    _service = service
                    break
            processes_dict["%s:%s" % (_service, process)] = process
        fields["processes"].initial = processes_dict
def get_targeted_cluster_configs(self, plugin_name, hadoop_version):
    """Return cluster-scope config parameters keyed by plugin service."""
    version_info = saharaclient.plugin_get_version_details(
        self.request, plugin_name, hadoop_version)
    per_service = {}
    for svc in version_info.node_processes:
        per_service[svc] = self._extract_parameters(
            version_info.configs, 'cluster', svc)
    return per_service
def __init__(self, request, *args, **kwargs):
    """Build the general node-group-template configuration form.

    Populates plugin process choices, an optional floating-IP pool
    selector, hidden plugin identity fields and the plugin-specific
    "general" node parameters.
    """
    super(GeneralConfigAction, self).__init__(request, *args, **kwargs)
    sahara = saharaclient.client(request)
    # NOTE(review): unlike the other GeneralConfigAction variants in
    # this file, Helpers here wraps the sahara client rather than the
    # request — confirm which signature this Helpers class expects.
    hlps = helpers.Helpers(sahara)
    plugin, hadoop_version = (
        workflow_helpers.get_plugin_and_hadoop_version(request))
    process_choices = []
    try:
        version_details = saharaclient.plugin_get_version_details(
            request, plugin, hadoop_version)
        # Choice value keeps the owning service, e.g. "HDFS:datanode".
        for service, processes in version_details.node_processes.items():
            for process in processes:
                process_choices.append(
                    (str(service) + ":" + str(process), process))
    except Exception:
        exceptions.handle(request,
                          _("Unable to generate process choices."))
    if not saharaclient.SAHARA_AUTO_IP_ALLOCATION_ENABLED:
        pools = network.floating_ip_pools_list(request)
        pool_choices = [(pool.id, pool.name) for pool in pools]
        # FIX: the placeholder is user-visible, so mark it for
        # translation like every other label in this form.
        pool_choices.insert(0, (None, _("Do not assign floating IPs")))
        self.fields['floating_ip_pool'] = forms.ChoiceField(
            label=_("Floating IP pool"),
            choices=pool_choices,
            required=False)
    self.fields["processes"] = forms.MultipleChoiceField(
        label=_("Processes"),
        widget=forms.CheckboxSelectMultiple(),
        help_text=_("Processes to be launched in node group"),
        choices=process_choices)
    # Hidden fields carry the plugin identity through the workflow.
    self.fields["plugin_name"] = forms.CharField(
        widget=forms.HiddenInput(),
        initial=plugin
    )
    self.fields["hadoop_version"] = forms.CharField(
        widget=forms.HiddenInput(),
        initial=hadoop_version
    )
    node_parameters = hlps.get_general_node_group_configs(
        plugin, hadoop_version)
    for param in node_parameters:
        self.fields[param.name] = workflow_helpers.build_control(param)
def populate_image_choices(self, request, context):
    """List (id, name) pairs for images tagged for the chosen plugin."""
    try:
        images = saharaclient.image_list(request)
        plugin, hadoop_version = whelpers.get_plugin_and_hadoop_version(
            request)
        details = saharaclient.plugin_get_version_details(
            request, plugin, hadoop_version)
        # Only images carrying every required tag are usable.
        needed = set(details.required_image_tags)
        choices = []
        for image in images:
            if needed <= set(image.tags):
                choices.append((image.id, image.name))
        return choices
    except Exception:
        exceptions.handle(request, _("Unable to fetch image choices."))
        return []
def populate_image_choices(self, request, context):
    """Offer only the registered images that carry the plugin's tags."""
    try:
        all_images = saharaclient.image_list(request)
        plugin, hadoop_version = whelpers.get_plugin_and_hadoop_version(
            request)
        details = saharaclient.plugin_get_version_details(
            request, plugin, hadoop_version)
        required = set(details.required_image_tags)
        return [(img.id, img.name)
                for img in all_images
                if required.issubset(img.tags)]
    except Exception:
        exceptions.handle(request, _("Unable to fetch image choices."))
        return []
def update_context_with_plugin_tags(request, context):
    """Store required-image tags per plugin version in the context.

    Produces ``context["plugins"]`` shaped as
    ``{plugin_name: {version: required_image_tags}}``; failures are
    reported via horizon and degrade to partial/empty data.
    """
    try:
        plugins = saharaclient.plugin_list(request)
    except Exception:
        plugins = []
        exceptions.handle(request, _("Unable to process plugin tags"))
    tags_by_plugin = {}
    for plugin in plugins:
        tags_by_plugin[plugin.name] = {}
        for version in plugin.versions:
            try:
                details = saharaclient.plugin_get_version_details(
                    request, plugin.name, version)
                tags_by_plugin[plugin.name][version] = (
                    details.required_image_tags)
            except Exception:
                # A single broken version shouldn't hide the others.
                exceptions.handle(request,
                                  _("Unable to process plugin tags"))
    context["plugins"] = tags_by_plugin
def get_general_node_group_configs(self, plugin_name, hadoop_version):
    """Fetch the 'general' node-scope parameters for a plugin version."""
    details = saharaclient.plugin_get_version_details(
        self.request, plugin_name, hadoop_version)
    return self._extract_parameters(details.configs, 'node', 'general')
def __init__(self, request, context_seed, entry_point, *args, **kwargs):
    """Initialize the copy workflow pre-filled from an existing template.

    Loads the source node group template, copies its node configs, and
    seeds the GeneralConfig, SecurityConfig and SelectNodeProcesses
    steps with the template's values so the user starts from a
    "-copy" of the original.
    """
    self.template_id = context_seed["template_id"]
    self.template = saharaclient.nodegroup_template_get(request,
                                                        self.template_id)
    self._set_configs_to_copy(self.template.node_configs)
    plugin = self.template.plugin_name
    hadoop_version = self.template.hadoop_version
    # The parent workflow reads plugin identity from GET parameters,
    # so inject them before calling super(). GET is copied first
    # because Django QueryDicts on a request are immutable.
    request.GET = request.GET.copy()
    request.GET.update(
        {"plugin_name": plugin, "hadoop_version": hadoop_version})
    super(CopyNodegroupTemplate, self).__init__(request, context_seed,
                                                entry_point, *args,
                                                **kwargs)
    g_fields = None
    snp_fields = None
    s_fields = None
    for step in self.steps:
        if isinstance(step, create_flow.GeneralConfig):
            g_fields = step.action.fields
        if isinstance(step, create_flow.SecurityConfig):
            s_fields = step.action.fields
        if isinstance(step, create_flow.SelectNodeProcesses):
            snp_fields = step.action.fields
    g_fields["nodegroup_name"].initial = self.template.name + "-copy"
    g_fields["description"].initial = self.template.description
    g_fields["flavor"].initial = self.template.flavor_id
    if hasattr(self.template, "availability_zone"):
        g_fields["availability_zone"].initial = (
            self.template.availability_zone)
    if hasattr(self.template, "volumes_availability_zone"):
        g_fields["volumes_availability_zone"].initial = \
            self.template.volumes_availability_zone
    # volumes_per_node > 0 implies cinder-backed storage.
    storage = "cinder_volume" if self.template.volumes_per_node > 0 \
        else "ephemeral_drive"
    volumes_per_node = self.template.volumes_per_node
    volumes_size = self.template.volumes_size
    g_fields["storage"].initial = storage
    g_fields["volumes_per_node"].initial = volumes_per_node
    g_fields["volumes_size"].initial = volumes_size
    # FIX: removed a duplicate, unguarded assignment of
    # g_fields["volumes_availability_zone"].initial that raised
    # AttributeError when the template object lacked the
    # volumes_availability_zone attribute; the hasattr-guarded
    # assignment above already handles it.
    if self.template.floating_ip_pool:
        g_fields['floating_ip_pool'].initial = (
            self.template.floating_ip_pool)
    s_fields["security_autogroup"].initial = (
        self.template.auto_security_group)
    if self.template.security_groups:
        s_fields["security_groups"].initial = dict(
            [(sg, sg) for sg in self.template.security_groups])
    processes_dict = dict()
    try:
        plugin_details = saharaclient.plugin_get_version_details(
            request, plugin, hadoop_version)
        plugin_node_processes = plugin_details.node_processes
    except Exception:
        # Best effort: without plugin details the service part of the
        # "service:process" keys below will be None.
        plugin_node_processes = dict()
        exceptions.handle(request,
                          _("Unable to fetch plugin details."))
    for process in self.template.node_processes:
        # need to know the service
        _service = None
        for service, processes in plugin_node_processes.items():
            if process in processes:
                _service = service
                break
        processes_dict["%s:%s" % (_service, process)] = process
    snp_fields["processes"].initial = processes_dict
def get_node_processes(self, plugin_name, hadoop_version):
    """Return the node processes for the given plugin/version pair."""
    version_details = saharaclient.plugin_get_version_details(
        self.request, plugin_name, hadoop_version)
    return self._get_node_processes(version_details)
def get_general_node_group_configs(self, plugin_name, hadoop_version):
    """Extract node-scope "general" configs from the plugin details."""
    version_info = saharaclient.plugin_get_version_details(
        self.request, plugin_name, hadoop_version)
    params = self._extract_parameters(version_info.configs,
                                      "node", "general")
    return params
def get_cluster_general_configs(self, plugin_name, hadoop_version):
    """Return the cluster-scope "general" configuration parameters."""
    version_info = saharaclient.plugin_get_version_details(
        self.request, plugin_name, hadoop_version)
    return self._extract_parameters(version_info.configs,
                                    'cluster', "general")