Code Example #1
    def get_config_files(self, cluster_context, configs, instance=None):
        livy_conf_template = 'plugins/mapr/services/hue/' \
                             'resources/livy_conf_%s.template'
        livy_conf = bcf.TemplateFile("livy.conf")
        livy_conf.parse(
            utils.get_file_text(livy_conf_template % self.version,
                                'sahara_plugins'))
        livy_conf.remote_path = self.home_dir(cluster_context) + '/conf'

        livy_sh_template = 'plugins/mapr/services/hue/' \
                           'resources/livy_sh_%s.template'
        livy_sh = bcf.TemplateFile("livy-env.sh")
        livy_sh.remote_path = self.home_dir(cluster_context) + '/conf'
        livy_sh.parse(
            utils.get_file_text(livy_sh_template % self.version,
                                'sahara_plugins'))
        livy_sh.add_property('hadoop_version', cluster_context.hadoop_version)
        livy_sh.add_property('spark_version', spark.SparkOnYarnV201().version)
        livy_sh.mode = 777

        hue_instances = cluster_context.get_instances(HUE)
        for instance in hue_instances:
            if instance not in cluster_context.changed_instances():
                cluster_context.should_be_restarted[self] += [instance]

        return [livy_sh, livy_conf]
Code Example #2
    def get_config_files(self, cluster_context, configs, instance=None):
        template = 'plugins/mapr/services/hue/resources/hue_%s.template'
        # hue.ini
        hue_ini = bcf.TemplateFile("hue.ini")
        hue_ini.remote_path = self.conf_dir(cluster_context)
        hue_ini.parse(
            utils.get_file_text(template % self.version, 'sahara_plugins'))
        hue_ini.add_properties(self._get_hue_ini_props(cluster_context))
        hue_ini.add_property("thrift_version",
                             configs[self.THRIFT_VERSION.name])

        # hue.sh
        hue_sh_template = 'plugins/mapr/services/hue/' \
                          'resources/hue_sh_%s.template'
        hue_sh = bcf.TemplateFile("hue.sh")
        hue_sh.remote_path = self.home_dir(cluster_context) + '/bin'
        hue_sh.parse(
            utils.get_file_text(hue_sh_template % self.version,
                                'sahara_plugins'))
        hue_sh.add_property('hadoop_version', cluster_context.hadoop_version)
        hue_sh.mode = 777

        hue_instances = cluster_context.get_instances(HUE)
        for instance in hue_instances:
            if instance not in cluster_context.changed_instances():
                cluster_context.should_be_restarted[self] += [instance]

        return [hue_ini, hue_sh]
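
Examples #1 and #2 follow the same flow: resolve a version-specific template path, parse it into a bcf.TemplateFile, inject properties, point remote_path at the target directory, and (for shell scripts) set mode before returning the file objects. A condensed sketch of that flow is shown below; the helper name and signature are hypothetical, and it assumes the same module-level bcf and utils imports used in the examples above.

    # Hypothetical helper condensing the pattern of Examples #1 and #2;
    # the name and signature are illustrative, not part of sahara-plugins.
    def _template_file(self, name, template_path, remote_dir, mode=None,
                       **props):
        f = bcf.TemplateFile(name)
        f.remote_path = remote_dir
        f.parse(utils.get_file_text(template_path % self.version,
                                    'sahara_plugins'))
        for key, value in props.items():
            f.add_property(key, value)
        if mode is not None:
            f.mode = mode
        return f
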
Code Example #3
    def test_render(self):
        foo = conf_f.TemplateFile('foo')
        expected = {'value1': 'value1', 'value2': 'value2'}
        foo.parse(self.content)
        foo._config_dict = expected
        actual = foo.render()
        self.assertEqual(self.rendered, actual)
Code Example #4
    def get_config_files(self, cluster_context, configs, instance=None):
        hbase_version = self._get_hbase_version(cluster_context)
        hive_version = self._get_hive_version(cluster_context)
        # spark-env.sh
        template = 'plugins/mapr/services/' \
                   'spark/resources/spark-env.template'
        env_sh = bcf.TemplateFile('spark-env.sh')
        env_sh.remote_path = self.conf_dir(cluster_context)
        env_sh.parse(utils.get_file_text(template, 'sahara_plugins'))
        env_sh.add_property('version', self.version)

        # spark-defaults
        conf = bcf.PropertiesFile('spark-defaults.conf', separator=' ')
        conf.remote_path = self.conf_dir(cluster_context)
        if instance:
            conf.fetch(instance)

        # compatibility.version
        versions = bcf.PropertiesFile('compatibility.version')
        versions.remote_path = self.home_dir(cluster_context) + '/mapr-util'
        if instance:
            versions.fetch(instance)

        if hive_version:
            versions.add_property('hive_versions', hive_version + '.0')
            conf.add_properties(self._hive_properties(cluster_context))
        if hbase_version:
            versions.add_property('hbase_versions', hbase_version)
            conf.add_property(
                'spark.executor.extraClassPath', '%s/lib/*' %
                self._hbase(cluster_context).home_dir(cluster_context))
        return [conf, versions, env_sh]
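
Example #4 also shows the bcf.PropertiesFile side of the API: the separator is chosen per file format (spark-defaults.conf uses a space between key and value), fetch(instance) is called when a concrete instance is available, presumably to pick up values already present on the node, and computed properties are then layered on top. A minimal sketch of that sequence, with an illustrative remote path and value:

        # Sketch of the PropertiesFile calls seen in Example #4; the remote
        # path and property value are illustrative, not from sahara-plugins.
        defaults = bcf.PropertiesFile('spark-defaults.conf', separator=' ')
        defaults.remote_path = '/opt/mapr/spark/conf'
        if instance:
            defaults.fetch(instance)
        defaults.add_property('spark.executor.extraClassPath',
                              '/opt/mapr/hbase/lib/*')
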
Code Example #5
    def get_config_files(self, cluster_context, configs, instance=None):
        defaults = 'plugins/mapr/services/impala/resources/impala-env.sh.j2'

        impala_env = bcf.TemplateFile("env.sh")
        impala_env.remote_path = self.conf_dir(cluster_context)
        if instance:
            impala_env.fetch(instance)
        impala_env.parse(utils.get_file_text(defaults, 'sahara_plugins'))
        impala_env.add_properties(self._get_impala_env_props(cluster_context))
        sentry_host = cluster_context.get_instance(sentry.SENTRY)
        if sentry_host:
            sentry_mode = cluster_context._get_cluster_config_value(
                sentry.Sentry().SENTRY_STORAGE_MODE)
            ui_name = sentry.Sentry().ui_name
            sentry_version = cluster_context.get_chosen_service_version(
                ui_name)
            sentry_service = cluster_context. \
                _find_service_instance(ui_name, sentry_version)
            if sentry_service.supports(self, sentry_mode):
                impala_env.add_properties({
                    'sentry_home':
                    sentry_service.home_dir(cluster_context),
                    'sentry_db':
                    sentry_mode == sentry.DB_STORAGE_SENTRY_MODE,
                    'sentry_policy_file':
                    'maprfs://' + sentry_service.GLOBAL_POLICY_FILE,
                })
        return [impala_env]
Code Example #6
File: mysql.py  Project: tellesnobrega/sahara-plugins
    def _create_script_obj(filename, template, **kwargs):
        script = cf.TemplateFile(filename)
        script.remote_path = '/tmp/'
        script.parse(
            u.get_file_text(
                'plugins/mapr/services/mysql/resources/%s' % template,
                'sahara_plugins'))
        for k, v in six.iteritems(kwargs):
            script.add_property(k, v)
        return script
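
Example #6 uses six.iteritems for Python 2/3 compatibility; on Python 3 alone the loop reduces to kwargs.items(), as in this otherwise identical sketch (still assuming the module's cf and u imports):

    def _create_script_obj(filename, template, **kwargs):
        script = cf.TemplateFile(filename)
        script.remote_path = '/tmp/'
        script.parse(
            u.get_file_text(
                'plugins/mapr/services/mysql/resources/%s' % template,
                'sahara_plugins'))
        for key, value in kwargs.items():  # Python 3: no six needed
            script.add_property(key, value)
        return script
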
Code Example #7
    def test_add_property(self):
        foo = conf_f.TemplateFile('foo')
        expected = {}
        self.assertEqual(expected, foo._config_dict)

        foo.add_property('key1', 'value1')
        expected = {'key1': 'value1'}
        self.assertEqual(expected, foo._config_dict)

        foo.add_property('key2', 'value2')
        expected = {'key1': 'value1', 'key2': 'value2'}
        self.assertEqual(expected, foo._config_dict)
Code Example #8
    def get_config_files(self, cluster_context, configs, instance=None):
        sentry_default = \
            'plugins/mapr/services/sentry/resources/sentry-default.xml'
        global_policy_template = \
            'plugins/mapr/services/sentry/resources/global-policy.ini'
        # sentry-site.xml
        sentry_site = cf.HadoopXML('sentry-site.xml')
        sentry_site.remote_path = self.conf_dir(cluster_context)
        if instance:
            sentry_site.fetch(instance)
        sentry_site.load_properties(configs)
        sentry_mode = configs[self.SENTRY_STORAGE_MODE.name]
        sentry_site.parse(utils.get_file_text(sentry_default,
                                              'sahara_plugins'))
        sentry_site.add_properties(
            self._get_sentry_site_props(cluster_context, sentry_mode))
        # global-policy.ini
        global_policy = cf.TemplateFile('global-policy.ini')
        global_policy.remote_path = self.conf_dir(cluster_context)
        global_policy.parse(
            utils.get_file_text(global_policy_template, 'sahara_plugins'))
        return [sentry_site, global_policy]
Code Example #9
    def test_parse(self):
        foo = conf_f.TemplateFile('foo')
        foo.parse(self.content)
        self.assertIsNotNone(foo._template)
Code Example #10
    def test_remote_path(self):
        foo = conf_f.TemplateFile('foo')
        foo.remote_path = '/bar'
        self.assertEqual('/bar/foo', foo.remote_path)
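
Taken together, the unit tests in Examples #3, #7, #9, and #10 pin down the TemplateFile surface they exercise: parse() stores a template in _template, add_property() accumulates values in _config_dict, render() expands the template against that dict, and remote_path joins the directory with the file name. The stand-in below mirrors that interface for illustration only; it is not the sahara-plugins implementation (the .j2 template in Example #5 suggests the real renderer is Jinja-based) and uses str.format substitution purely to stay self-contained.

import os


class TemplateFileSketch(object):
    """Illustrative stand-in mirroring the TemplateFile interface the
    tests above exercise; not the sahara-plugins implementation."""

    def __init__(self, file_name):
        self.f_name = file_name
        self._remote_path = None
        self._template = None
        self._config_dict = {}

    def parse(self, content):
        # The real class parses template content; plain text is kept here.
        self._template = content

    def add_property(self, name, value):
        self._config_dict[name] = value

    def add_properties(self, properties):
        self._config_dict.update(properties)

    def render(self):
        # Simplified substitution: expands {name} placeholders from
        # _config_dict; the real renderer is assumed to be Jinja-based.
        return self._template.format(**self._config_dict)

    @property
    def remote_path(self):
        # '/bar' + 'foo' -> '/bar/foo', as asserted in Example #10.
        return os.path.join(self._remote_path, self.f_name)

    @remote_path.setter
    def remote_path(self, path):
        self._remote_path = path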