    # Relies on imports elided from this excerpt: json, logging, os,
    # shutil.copy and the deployer_utils helper module.
    def create_component(self, staged_component_path, application_name,
                         user_name, component, properties):
        logging.debug("create_component: %s %s %s %s %s", application_name,
                      user_name, json.dumps(component), properties,
                      staged_component_path)
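        # Derive the per-component staging path under /tmp, the install path
        # under /opt, and the systemd service name, all namespaced as
        # <namespace>/<application>/<component>.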
        remote_component_tmp_path = '%s/%s/%s' % ('/tmp/%s' % self._namespace,
                                                  application_name,
                                                  component['component_name'])
        remote_component_install_path = '%s/%s/%s' % (
            '/opt/%s' % self._namespace, application_name,
            component['component_name'])
        service_name = '%s-%s-%s' % (self._namespace, application_name,
                                     component['component_name'])

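        # Fill in defaults for the optional Spark settings so the property
        # templates below can always be expanded.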
        if 'component_spark_version' not in properties:
            properties['component_spark_version'] = '1'
        if 'component_spark_submit_args' not in properties:
            properties['component_spark_submit_args'] = ''
        if 'component_py_files' not in properties:
            properties['component_py_files'] = ''

        if 'upstart.conf' in component['component_detail']:
            # old style applications - reject these
            raise Exception(
                'Support for user-supplied upstart.conf files has been deprecated; '
                'the deployment manager will supply one automatically. '
                'Please see the PNDA example-applications for usage.')
        else:
            # new style applications that don't need to provide upstart.conf or yarn-kill.py
            if 'component_main_jar' in properties and 'component_main_class' not in properties:
                raise Exception(
                    'properties.json must contain "main_class" for %s sparkStreaming %s'
                    % (application_name, component['component_name']))

            java_app = None
            if 'component_main_jar' in properties:
                java_app = True
            elif 'component_main_py' in properties:
                java_app = False
            else:
                raise Exception(
                    'properties.json must contain "main_jar" or "main_py" for %s sparkStreaming %s'
                    % (application_name, component['component_name']))

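            # Choose the systemd unit template for a JVM or PySpark job,
            # default the restart policy, and stage the template with the
            # rest of the component.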
            this_dir = os.path.dirname(os.path.realpath(__file__))
            service_script = 'systemd.service.tpl' if java_app else 'systemd.service.py.tpl'
            service_script_install_path = '/usr/lib/systemd/system/%s.service' % service_name
            if 'component_respawn_type' not in properties:
                properties['component_respawn_type'] = 'always'
            if 'component_respawn_timeout_sec' not in properties:
                properties['component_respawn_timeout_sec'] = '2'
            copy(os.path.join(this_dir, service_script), staged_component_path)

        self._fill_properties(
            os.path.join(staged_component_path, service_script), properties)
        self._fill_properties(
            os.path.join(staged_component_path, 'log4j.properties'),
            properties)
        self._fill_properties(
            os.path.join(staged_component_path, 'application.properties'),
            properties)
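        # Create the staging and install directories via shell commands.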
        mkdircommands = []
        mkdircommands.append('mkdir -p %s' % remote_component_tmp_path)
        mkdircommands.append('mkdir -p %s' % remote_component_install_path)
        logging.debug("mkdircommands are %s", mkdircommands)
        deployer_utils.exec_cmds(mkdircommands)
        logging.debug("Staged Component Path is: %s", staged_component_path)

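        # Copy the staged files into the temporary path, then promote the
        # systemd unit file and the component files to their final locations.
        # os.system runs these copies on the machine hosting this code.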
        os.system("cp %s %s" %
                  (staged_component_path + '/*', remote_component_tmp_path))

        os.system('cp %s/%s %s' % (remote_component_tmp_path, service_script,
                                   service_script_install_path))
        os.system(
            'cp %s %s' %
            (remote_component_tmp_path + '/*', remote_component_install_path))

        commands = []

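        # For JVM applications, update the main jar in place so it carries
        # the freshly templated application.properties.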
        if 'component_main_jar' in properties:
            commands.append('cd %s && jar uf %s application.properties' %
                            (remote_component_install_path,
                             properties['component_main_jar']))
        logging.debug("commands are : %s", commands)
        deployer_utils.exec_cmds(commands)
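        # Recover the application root from the leading segments of the
        # staging path and generate the Spark YAML descriptor from it;
        # create_spark_yml is defined elsewhere in this class.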
        app_path = staged_component_path.split('/')[:5]
        jar_path = '/'.join(app_path)

        self.create_spark_yml(jar_path, properties)
        app_removal_path = staged_component_path.split('/')[:3]

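        # Assemble the commands that reverse this deployment; the caller is
        # expected to run them (returned under the 'ssh' key) at uninstall
        # time.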
        undo_commands = []
        undo_commands.append('rm -rf %s\n' % '/'.join(app_removal_path))
        undo_commands.append('rm -rf %s\n' % remote_component_tmp_path)
        undo_commands.append('rm -rf %s\n' % remote_component_install_path)
        undo_commands.append('rm %s\n' % service_script_install_path)
        logging.debug("uninstall commands: %s", undo_commands)
        return {'ssh': undo_commands, 'crdjson': jar_path}
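    # Runs a list of component control commands; presumably invoked by the
    # start/stop handlers.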
    def _control_component(self, cmds):
        # key_file = self._environment['cluster_private_key']
        # root_user = self._environment['cluster_root_user']
        # target_host = 'localhost'
        deployer_utils.exec_cmds(cmds)