Example #1
0
    def __init__(self, request, *args, **kwargs):
        """Build the "processes" multiple-choice field for the current plugin.

        Choices are labelled "service:process"; a failure to fetch plugin
        details is reported to the user and yields an empty choice list.
        """
        super(SelectNodeProcessesAction, self).__init__(
            request, *args, **kwargs)

        plugin, hadoop_version = (
            workflow_helpers.get_plugin_and_hadoop_version(request))
        node_processes = {}
        try:
            version_details = saharaclient.plugin_get_version_details(
                request, plugin, hadoop_version)
            node_processes = version_details.node_processes
        except Exception:
            exceptions.handle(request,
                              _("Unable to generate process choices."))
        process_choices = [
            (str(service) + ":" + str(process), process)
            for service, processes in node_processes.items()
            for process in processes]

        self.fields["processes"] = forms.MultipleChoiceField(
            label=_("Select Node Group Processes"),
            widget=CheckboxSelectMultiple(),
            choices=process_choices,
            required=True)
Example #2
0
 def get_help_text(self):
     """Return the step help text with plugin name/version substituted in."""
     plugin, hadoop_version = (
         workflow_helpers.get_plugin_and_hadoop_version(self.request))
     extra = {"plugin_name": plugin, "hadoop_version": hadoop_version}
     return super(GeneralConfigAction, self).get_help_text(extra)
def populate_anti_affinity_choices(self, request, context):
    """Return anti-affinity process choices and preselect current values.

    When a cluster template is given, its anti_affinity list seeds the
    initial selection; otherwise the "aa_groups" request parameter does.
    On any failure the user is notified and an empty list is returned.
    """
    try:
        sahara = saharaclient.client(request)
        plugin, version = whelpers.get_plugin_and_hadoop_version(request)

        version_details = sahara.plugins.get_version_details(plugin, version)
        process_choices = [
            (process, process)
            for processes in version_details.node_processes.values()
            for process in processes]

        cluster_template_id = request.REQUEST.get("cluster_template_id", None)
        if cluster_template_id is None:
            selected_processes = request.REQUEST.get("aa_groups", [])
        else:
            cluster_template = (
                sahara.cluster_templates.get(cluster_template_id))
            selected_processes = cluster_template.anti_affinity

        # Map each selected process onto itself to mark it checked.
        checked_dict = {process: process for process in selected_processes}

        self.fields['anti_affinity'].initial = checked_dict
    except Exception:
        process_choices = []
        exceptions.handle(request,
                          _("Unable to populate anti-affinity processes."))
    return process_choices
Example #4
0
 def get_help_text(self):
     """Expose the current plugin name and version to the parent help text."""
     plugin, hadoop_version = (
         workflow_helpers.get_plugin_and_hadoop_version(self.request))
     return super(GeneralConfigAction, self).get_help_text({
         "plugin_name": plugin,
         "hadoop_version": hadoop_version,
     })
Example #5
0
    def __init__(self, request, *args, **kwargs):
        """Populate the node-process selection field from plugin details.

        Each choice is labelled "service:process"; if the plugin lookup
        fails the error is surfaced and the field is left with no choices.
        """
        super(SelectNodeProcessesAction, self).__init__(
            request, *args, **kwargs)

        plugin, hadoop_version = (
            workflow_helpers.get_plugin_and_hadoop_version(request))
        node_processes = {}
        try:
            details = saharaclient.plugin_get_version_details(
                request, plugin, hadoop_version)
            node_processes = details.node_processes
        except Exception:
            exceptions.handle(request,
                              _("Unable to generate process choices."))

        process_choices = []
        for service, processes in node_processes.items():
            process_choices.extend(
                ("%s:%s" % (service, process), process)
                for process in processes)

        self.fields["processes"] = forms.MultipleChoiceField(
            label=_("Select Node Group Processes"),
            widget=CheckboxSelectMultiple(),
            choices=process_choices,
            required=True)
Example #6
0
    def __init__(self, request, *args, **kwargs):
        """Assemble the general configuration fields for the node group form.

        Adds process choices, optional floating-IP pool selection, the
        proxy-gateway toggle, hidden plugin identity fields, plugin-specific
        parameters and (when present) the wizard guide-type field.
        """
        super(GeneralConfigAction, self).__init__(request, *args, **kwargs)

        hlps = helpers.Helpers(request)

        plugin, hadoop_version = (
            workflow_helpers.get_plugin_and_hadoop_version(request))
        process_choices = []
        try:
            details = saharaclient.plugin_get_version_details(
                request, plugin, hadoop_version)
            for service, processes in details.node_processes.items():
                process_choices.extend(
                    ("%s:%s" % (service, process), process)
                    for process in processes)
        except Exception:
            exceptions.handle(request,
                              _("Unable to generate process choices."))

        # Floating IPs are only user-selectable when Sahara does not
        # allocate them automatically.
        if not saharaclient.SAHARA_AUTO_IP_ALLOCATION_ENABLED:
            pool_choices = [
                (pool.id, pool.name)
                for pool in network.floating_ip_pools_list(request)]
            pool_choices.insert(0, (None, "Do not assign floating IPs"))

            self.fields['floating_ip_pool'] = forms.ChoiceField(
                label=_("Floating IP Pool"),
                choices=pool_choices,
                required=False)

        self.fields["proxygateway"] = forms.BooleanField(
            label=_("Proxy Gateway"),
            widget=forms.CheckboxInput(),
            help_text=_("Sahara will use instances of this node group to "
                        "access other cluster instances."),
            required=False)

        self.fields["processes"] = forms.MultipleChoiceField(
            label=_("Processes"),
            widget=forms.CheckboxSelectMultiple(),
            help_text=_("Processes to be launched in node group"),
            choices=process_choices)

        self.fields["plugin_name"] = forms.CharField(
            widget=forms.HiddenInput(), initial=plugin)
        self.fields["hadoop_version"] = forms.CharField(
            widget=forms.HiddenInput(), initial=hadoop_version)

        for param in hlps.get_general_node_group_configs(
                plugin, hadoop_version):
            self.fields[param.name] = workflow_helpers.build_control(param)

        if request.REQUEST.get("guide_template_type"):
            self.fields["guide_template_type"] = forms.CharField(
                required=False,
                widget=forms.HiddenInput(),
                initial=request.REQUEST.get("guide_template_type"))
Example #7
0
    def handle(self, request, context):
        """Update the node group template from the workflow context.

        Returns True on success; on a Sahara API error stores the error
        text in self.error_description and returns False. Any other
        exception is routed to horizon's generic handler.
        """
        try:
            # Context stores processes as "service:process"; only the
            # process part is sent to the API.
            processes = []
            for service_process in context["general_processes"]:
                processes.append(str(service_process).split(":")[1])

            configs_dict = (
                workflow_helpers.parse_configs_from_context(
                    context, self.defaults))

            plugin, hadoop_version = (
                workflow_helpers.get_plugin_and_hadoop_version(request))

            volumes_per_node = None
            volumes_size = None
            volumes_availability_zone = None
            volume_type = None
            volume_local_to_instance = False

            # Volume settings apply only when cinder-backed storage is used.
            if context["general_storage"] == "cinder_volume":
                volumes_per_node = context["general_volumes_per_node"]
                volumes_size = context["general_volumes_size"]
                volume_type = context["general_volume_type"]
                volume_local_to_instance = \
                    context["general_volume_local_to_instance"]
                volumes_availability_zone = \
                    context["general_volumes_availability_zone"]

            saharaclient.nodegroup_template_update(
                request=request,
                ngt_id=self.template_id,
                name=context["general_nodegroup_name"],
                plugin_name=plugin,
                hadoop_version=hadoop_version,
                flavor_id=context["general_flavor"],
                description=context["general_description"],
                volumes_per_node=volumes_per_node,
                volumes_size=volumes_size,
                volume_type=volume_type,
                volume_local_to_instance=volume_local_to_instance,
                volumes_availability_zone=volumes_availability_zone,
                node_processes=processes,
                node_configs=configs_dict,
                floating_ip_pool=context.get("general_floating_ip_pool"),
                security_groups=context["security_groups"],
                auto_security_group=context["security_autogroup"],
                availability_zone=context["general_availability_zone"],
                use_autoconfig=context['general_use_autoconfig'],
                is_proxy_gateway=context["general_proxygateway"])
            return True
        except api_base.APIException as e:
            # str(e), not e.message: BaseException.message was removed in
            # Python 3, and this matches the create handler's behavior.
            self.error_description = str(e)
            return False
        except Exception:
            exceptions.handle(request)
Example #8
0
    def handle(self, request, context):
        """Create a node group template and record wizard state if needed."""
        try:
            # Context stores processes as "service:process"; keep only the
            # process name for the API call.
            processes = [str(sp).split(":")[1]
                         for sp in context["general_processes"]]

            configs_dict = (workflow_helpers.parse_configs_from_context(
                context, self.defaults))

            plugin, hadoop_version = (
                workflow_helpers.get_plugin_and_hadoop_version(request))

            volumes_per_node = None
            volumes_size = None
            volumes_availability_zone = None

            if context["general_storage"] == "cinder_volume":
                volumes_per_node = context["general_volumes_per_node"]
                volumes_size = context["general_volumes_size"]
                volumes_availability_zone = \
                    context["general_volumes_availability_zone"]

            ngt = saharaclient.nodegroup_template_create(
                request,
                name=context["general_nodegroup_name"],
                plugin_name=plugin,
                hadoop_version=hadoop_version,
                description=context["general_description"],
                flavor_id=context["general_flavor"],
                volumes_per_node=volumes_per_node,
                volumes_size=volumes_size,
                volumes_availability_zone=volumes_availability_zone,
                node_processes=processes,
                node_configs=configs_dict,
                floating_ip_pool=context.get("general_floating_ip_pool"),
                security_groups=context["security_groups"],
                auto_security_group=context["security_autogroup"],
                is_proxy_gateway=context["general_proxygateway"],
                availability_zone=context["general_availability_zone"])

            # When launched from the wizard, remember the created template
            # and return to the cluster guide.
            hlps = helpers.Helpers(request)
            if hlps.is_from_guide():
                guide_type = context["general_guide_template_type"]
                request.session[guide_type + "_name"] = (
                    context["general_nodegroup_name"])
                request.session[guide_type + "_id"] = ngt.id
                self.success_url = (
                    "horizon:project:data_processing.wizard:cluster_guide")

            return True
        except api_base.APIException as e:
            self.error_description = str(e)
            return False
        except Exception:
            exceptions.handle(request)
Example #9
0
    def __init__(self, request, context_seed, entry_point, *args, **kwargs):
        """Populate the config tabs for the plugin before workflow init."""
        hlps = helpers.Helpers(request)

        plugin, hadoop_version = (
            workflow_helpers.get_plugin_and_hadoop_version(request))

        general_parameters = hlps.get_general_node_group_configs(
            plugin, hadoop_version)
        service_parameters = hlps.get_targeted_node_group_configs(
            plugin, hadoop_version)

        self._populate_tabs(general_parameters, service_parameters)

        super(ConfigureNodegroupTemplate, self).__init__(
            request, context_seed, entry_point, *args, **kwargs)
Example #10
0
    def handle(self, request, context):
        """Update the node group template from the workflow context.

        Returns True on success; on a Sahara API error stores the error
        text in self.error_description and returns False. Any other
        exception is routed to horizon's generic handler.
        """
        try:
            # Context stores processes as "service:process"; only the
            # process part is sent to the API.
            processes = []
            for service_process in context["general_processes"]:
                processes.append(str(service_process).split(":")[1])

            configs_dict = (workflow_helpers.parse_configs_from_context(
                context, self.defaults))

            plugin, hadoop_version = (
                workflow_helpers.get_plugin_and_hadoop_version(request))

            volumes_per_node = None
            volumes_size = None
            volumes_availability_zone = None
            volume_type = None
            volume_local_to_instance = False

            # Volume settings apply only when cinder-backed storage is used.
            if context["general_storage"] == "cinder_volume":
                volumes_per_node = context["general_volumes_per_node"]
                volumes_size = context["general_volumes_size"]
                volume_type = context["general_volume_type"]
                volume_local_to_instance = \
                    context["general_volume_local_to_instance"]
                volumes_availability_zone = \
                    context["general_volumes_availability_zone"]

            saharaclient.nodegroup_template_update(
                request=request,
                ngt_id=self.template_id,
                name=context["general_nodegroup_name"],
                plugin_name=plugin,
                hadoop_version=hadoop_version,
                flavor_id=context["general_flavor"],
                description=context["general_description"],
                volumes_per_node=volumes_per_node,
                volumes_size=volumes_size,
                volume_type=volume_type,
                volume_local_to_instance=volume_local_to_instance,
                volumes_availability_zone=volumes_availability_zone,
                node_processes=processes,
                node_configs=configs_dict,
                floating_ip_pool=context.get("general_floating_ip_pool"),
                security_groups=context["security_groups"],
                auto_security_group=context["security_autogroup"],
                availability_zone=context["general_availability_zone"],
                use_autoconfig=context['general_use_autoconfig'],
                is_proxy_gateway=context["general_proxygateway"])
            return True
        except api_base.APIException as e:
            # str(e), not e.message: BaseException.message was removed in
            # Python 3, and this matches the create handler's behavior.
            self.error_description = str(e)
            return False
        except Exception:
            exceptions.handle(request)
Example #11
0
    def handle(self, request, context):
        """Create a node group template; record wizard session state if any."""
        try:
            processes = []
            for service_process in context["general_processes"]:
                processes.append(str(service_process).split(":")[1])

            configs_dict = (
                workflow_helpers.parse_configs_from_context(
                    context, self.defaults))

            plugin, hadoop_version = (
                workflow_helpers.get_plugin_and_hadoop_version(request))

            volumes_per_node = None
            volumes_size = None
            volumes_availability_zone = None

            if context["general_storage"] == "cinder_volume":
                volumes_per_node = context["general_volumes_per_node"]
                volumes_size = context["general_volumes_size"]
                volumes_availability_zone = \
                    context["general_volumes_availability_zone"]

            ngt = saharaclient.nodegroup_template_create(
                request,
                name=context["general_nodegroup_name"],
                plugin_name=plugin,
                hadoop_version=hadoop_version,
                description=context["general_description"],
                flavor_id=context["general_flavor"],
                volumes_per_node=volumes_per_node,
                volumes_size=volumes_size,
                volumes_availability_zone=volumes_availability_zone,
                node_processes=processes,
                node_configs=configs_dict,
                floating_ip_pool=context.get("general_floating_ip_pool"),
                security_groups=context["security_groups"],
                auto_security_group=context["security_autogroup"],
                is_proxy_gateway=context["general_proxygateway"],
                availability_zone=context["general_availability_zone"])

            hlps = helpers.Helpers(request)
            if hlps.is_from_guide():
                guide_type = context["general_guide_template_type"]
                request.session[guide_type + "_name"] = (
                    context["general_nodegroup_name"])
                request.session[guide_type + "_id"] = ngt.id
                self.success_url = (
                    "horizon:project:data_processing.wizard:cluster_guide")

            return True
        except api_base.APIException as e:
            self.error_description = str(e)
            return False
        except Exception:
            exceptions.handle(request)
Example #12
0
    def __init__(self, request, *args, **kwargs):
        """Set up the general configuration fields for the node group."""
        super(GeneralConfigAction, self).__init__(request, *args, **kwargs)

        hlps = helpers.Helpers(request)

        plugin, hadoop_version = (
            workflow_helpers.get_plugin_and_hadoop_version(request))
        process_choices = []
        try:
            details = saharaclient.plugin_get_version_details(
                request, plugin, hadoop_version)
            for service, processes in details.node_processes.items():
                for process in processes:
                    process_choices.append(
                        (str(service) + ":" + str(process), process))
        except Exception:
            exceptions.handle(
                request, _("Unable to generate process choices."))

        if not saharaclient.SAHARA_AUTO_IP_ALLOCATION_ENABLED:
            pools = network.floating_ip_pools_list(request)
            pool_choices = [(pool.id, pool.name) for pool in pools]
            pool_choices.insert(0, (None, "Do not assign floating IPs"))

            self.fields["floating_ip_pool"] = forms.ChoiceField(
                label=_("Floating IP Pool"),
                choices=pool_choices,
                required=False)

        self.fields["proxygateway"] = forms.BooleanField(
            label=_("Proxy Gateway"),
            widget=forms.CheckboxInput(),
            help_text=_("Sahara will use instances of this node group to "
                        "access other cluster instances."),
            required=False)

        self.fields["processes"] = forms.MultipleChoiceField(
            label=_("Processes"),
            widget=forms.CheckboxSelectMultiple(),
            help_text=_("Processes to be launched in node group"),
            choices=process_choices)

        self.fields["plugin_name"] = forms.CharField(
            widget=forms.HiddenInput(), initial=plugin)
        self.fields["hadoop_version"] = forms.CharField(
            widget=forms.HiddenInput(), initial=hadoop_version)

        node_parameters = hlps.get_general_node_group_configs(
            plugin, hadoop_version)
        for param in node_parameters:
            self.fields[param.name] = workflow_helpers.build_control(param)

        if request.REQUEST.get("guide_template_type"):
            self.fields["guide_template_type"] = forms.CharField(
                required=False,
                widget=forms.HiddenInput(),
                initial=request.REQUEST.get("guide_template_type"))
Example #13
0
    def __init__(self, request, context_seed, entry_point, *args, **kwargs):
        """Fill the configuration tabs with plugin parameters, then delegate."""
        config_helper = helpers.Helpers(request)

        plugin, hadoop_version = (
            workflow_helpers.get_plugin_and_hadoop_version(request))

        general_params = config_helper.get_general_node_group_configs(
            plugin, hadoop_version)
        service_params = config_helper.get_targeted_node_group_configs(
            plugin, hadoop_version)

        self._populate_tabs(general_params, service_params)

        super(ConfigureNodegroupTemplate, self).__init__(
            request, context_seed, entry_point, *args, **kwargs)
Example #14
0
    def handle(self, request, context):
        """Create a cluster for the job, then launch the job execution.

        Returns True on success, False if either the cluster creation or
        the job launch fails (each failure is reported to the user).
        """
        node_groups = None

        plugin, hadoop_version = (
            whelpers.get_plugin_and_hadoop_version(request))

        ct_id = context["cluster_general_cluster_template"] or None
        user_keypair = context["cluster_general_keypair"] or None

        # Map interface argument names to the values entered in the form.
        argument_ids = json.loads(context['argument_ids'])
        interface = {}
        for arg_id, name in argument_ids.items():
            interface[name] = context["argument_" + str(arg_id)]

        try:
            cluster = saharaclient.cluster_create(
                request,
                context["cluster_general_cluster_name"],
                plugin, hadoop_version,
                cluster_template_id=ct_id,
                default_image_id=context["cluster_general_image"],
                description=context["cluster_general_description"],
                node_groups=node_groups,
                user_keypair_id=user_keypair,
                is_transient=not(context["cluster_general_persist_cluster"]),
                net_id=context.get(
                    "cluster_general_neutron_management_network",
                    None))
        except Exception:
            exceptions.handle(request,
                              _("Unable to create new cluster for job."))
            return False

        try:
            saharaclient.job_execution_create(
                request,
                context["job_general_job"],
                cluster.id,
                context["job_general_job_input"],
                context["job_general_job_output"],
                context["job_config"],
                interface)
        except Exception:
            exceptions.handle(request,
                              _("Unable to launch job."))
            return False
        return True
Example #15
0
    def handle(self, request, context):
        """Spin up a cluster and submit the job execution on it."""
        node_groups = None

        plugin, hadoop_version = (
            whelpers.get_plugin_and_hadoop_version(request))

        ct_id = context["cluster_general_cluster_template"] or None
        user_keypair = context["cluster_general_keypair"] or None

        argument_ids = json.loads(context['argument_ids'])
        interface = {name: context["argument_" + str(arg_id)]
                     for arg_id, name in argument_ids.items()}

        try:
            cluster = saharaclient.cluster_create(
                request,
                context["cluster_general_cluster_name"],
                plugin,
                hadoop_version,
                cluster_template_id=ct_id,
                default_image_id=context["cluster_general_image"],
                description=context["cluster_general_description"],
                node_groups=node_groups,
                user_keypair_id=user_keypair,
                is_transient=not (context["cluster_general_persist_cluster"]),
                net_id=context.get(
                    "cluster_general_neutron_management_network", None))
        except Exception:
            exceptions.handle(request,
                              _("Unable to create new cluster for job."))
            return False

        try:
            saharaclient.job_execution_create(
                request,
                context["job_general_job"],
                cluster.id,
                context["job_general_job_input"],
                context["job_general_job_output"],
                context["job_config"],
                interface)
        except Exception:
            exceptions.handle(request, _("Unable to launch job."))
            return False
        return True
Example #16
0
    def __init__(self, request, *args, **kwargs):
        """Build the general configuration fields for a node group template.

        Adds floating-IP pool selection (when auto IP allocation is off),
        the auto-configure and proxy-gateway toggles, hidden plugin
        identity fields, plugin-specific parameters, the optional wizard
        guide-type field, and the volume type choices.
        """
        super(GeneralConfigAction, self).__init__(request, *args, **kwargs)

        hlps = helpers.Helpers(request)

        plugin, hadoop_version = (
            workflow_helpers.get_plugin_and_hadoop_version(request))

        if not saharaclient.SAHARA_AUTO_IP_ALLOCATION_ENABLED:
            pools = network.floating_ip_pools_list(request)
            pool_choices = [(pool.id, pool.name) for pool in pools]
            pool_choices.insert(0, (None, "Do not assign floating IPs"))

            self.fields['floating_ip_pool'] = forms.ChoiceField(
                label=_("Floating IP Pool"),
                choices=pool_choices,
                required=False)

        self.fields["use_autoconfig"] = forms.BooleanField(
            label=_("Auto-configure"),
            help_text=_("If selected, instances of a node group will be "
                        "automatically configured during cluster "
                        "creation. Otherwise you should manually specify "
                        "configuration values."),
            required=False,
            widget=forms.CheckboxInput(),
            initial=True,
        )

        self.fields["proxygateway"] = forms.BooleanField(
            label=_("Proxy Gateway"),
            widget=forms.CheckboxInput(),
            help_text=_("Sahara will use instances of this node group to "
                        "access other cluster instances."),
            required=False)

        self.fields["plugin_name"] = forms.CharField(
            widget=forms.HiddenInput(),
            initial=plugin
        )
        self.fields["hadoop_version"] = forms.CharField(
            widget=forms.HiddenInput(),
            initial=hadoop_version
        )
        node_parameters = hlps.get_general_node_group_configs(plugin,
                                                              hadoop_version)
        for param in node_parameters:
            self.fields[param.name] = workflow_helpers.build_control(param)

        if request.REQUEST.get("guide_template_type"):
            self.fields["guide_template_type"] = forms.CharField(
                required=False,
                widget=forms.HiddenInput(),
                initial=request.REQUEST.get("guide_template_type"))

        # Default to an empty list so the choices assignment below cannot
        # raise NameError when the volume type lookup fails.
        volume_types = []
        try:
            volume_types = cinder.volume_type_list(request)
        except Exception:
            exceptions.handle(request,
                              _("Unable to get volume type list."))

        self.fields['volume_type'].choices = [(None, _("No volume type"))] + \
                                             [(type.name, type.name)
                                              for type in volume_types]
Example #17
0
    def __init__(self, request, *args, **kwargs):
        """Build the general configuration fields for a node group template.

        Adds floating-IP pool selection (when auto IP allocation is off),
        the auto-configure and proxy-gateway toggles, hidden plugin
        identity fields, plugin-specific parameters, the optional wizard
        guide-type field, and the volume type choices.
        """
        super(GeneralConfigAction, self).__init__(request, *args, **kwargs)

        hlps = helpers.Helpers(request)

        plugin, hadoop_version = (
            workflow_helpers.get_plugin_and_hadoop_version(request))

        if not saharaclient.SAHARA_AUTO_IP_ALLOCATION_ENABLED:
            pools = network.floating_ip_pools_list(request)
            pool_choices = [(pool.id, pool.name) for pool in pools]
            pool_choices.insert(0, (None, "Do not assign floating IPs"))

            self.fields['floating_ip_pool'] = forms.ChoiceField(
                label=_("Floating IP Pool"),
                choices=pool_choices,
                required=False)

        self.fields["use_autoconfig"] = forms.BooleanField(
            label=_("Auto-configure"),
            help_text=_("If selected, instances of a node group will be "
                        "automatically configured during cluster "
                        "creation. Otherwise you should manually specify "
                        "configuration values."),
            required=False,
            widget=forms.CheckboxInput(),
            initial=True,
        )

        self.fields["proxygateway"] = forms.BooleanField(
            label=_("Proxy Gateway"),
            widget=forms.CheckboxInput(),
            help_text=_("Sahara will use instances of this node group to "
                        "access other cluster instances."),
            required=False)

        self.fields["plugin_name"] = forms.CharField(
            widget=forms.HiddenInput(),
            initial=plugin
        )
        self.fields["hadoop_version"] = forms.CharField(
            widget=forms.HiddenInput(),
            initial=hadoop_version
        )
        node_parameters = hlps.get_general_node_group_configs(plugin,
                                                              hadoop_version)
        for param in node_parameters:
            self.fields[param.name] = workflow_helpers.build_control(param)

        if request.REQUEST.get("guide_template_type"):
            self.fields["guide_template_type"] = forms.CharField(
                required=False,
                widget=forms.HiddenInput(),
                initial=request.REQUEST.get("guide_template_type"))

        # Default to an empty list so the choices assignment below cannot
        # raise NameError when the volume type lookup fails.
        volume_types = []
        try:
            volume_types = cinder.volume_type_list(request)
        except Exception:
            exceptions.handle(request,
                              _("Unable to get volume type list."))

        self.fields['volume_type'].choices = [(None, _("No volume type"))] + \
                                             [(type.name, type.name)
                                              for type in volume_types]