def test_render(self):
    """render() output must parse back into an equal config dict (round trip).

    Fixture key fixed from 'ke1' to 'key1' for consistency with the other
    tests in this file; the round-trip assertion itself is unaffected by
    the exact key names.
    """
    foo = conf_f.EnvironmentConfig('foo')
    expected = {'key1': 'value1', 'key2': 'value2'}
    # Seed the internal dict directly so render() has known content.
    foo._config_dict = expected
    actual = foo.render()
    bar = conf_f.EnvironmentConfig('bar')
    bar.parse(actual)
    self.assertDictEqual(expected, bar._config_dict)
def test_render_extra_properties(self):
    """A property added after parse() must survive a render/parse cycle."""
    source = conf_f.EnvironmentConfig('foo')
    source.parse(self.content)
    source.add_property('key3', 'value3')
    rendered = source.render()
    result = conf_f.EnvironmentConfig('bar')
    result.parse(rendered)
    self.assertDictEqual(
        {'key1': 'value1', 'key2': 'value2', 'key3': 'value3'},
        result._config_dict)
def test_add_property(self):
    """add_property() accumulates entries in the internal config dict."""
    config = conf_f.EnvironmentConfig('foo')
    # A fresh config starts empty.
    self.assertDictEqual({}, config._config_dict)
    config.add_property('key1', 'value1')
    self.assertDictEqual({'key1': 'value1'}, config._config_dict)
    # Adding a second key keeps the first one.
    config.add_property('key2', 'value2')
    self.assertDictEqual({'key1': 'value1', 'key2': 'value2'},
                         config._config_dict)
def get_config_files(self, cluster_context, configs, instance=None):
    """Build the spark-env.sh config file object for this service.

    :param cluster_context: cluster context used to resolve the config dir
        and the Spark HA properties
    :param configs: properties to load into the file
    :param instance: optional node instance; when given, the current file
        contents are fetched from it before properties are applied
    :returns: single-element list containing the spark-env.sh config object
    """
    spark_env = bcf.EnvironmentConfig('spark-env.sh')
    spark_env.remote_path = self.conf_dir(cluster_context)
    # Only fetch existing contents when a concrete instance is provided.
    if instance:
        spark_env.fetch(instance)
    spark_env.load_properties(configs)
    spark_env.add_properties(self._get_spark_ha_props(cluster_context))
    spark_env.add_property('SPARK_WORKER_DIR', '/tmp/spark')
    return [spark_env]
def test_get_config_value(self):
    """_get_config_value() returns stored values, and None for missing keys."""
    config = conf_f.EnvironmentConfig('foo')
    config._config_dict = {'foo': 'bar'}
    self.assertEqual('bar', config._get_config_value('foo'))
    # A key that was never stored yields None rather than raising.
    self.assertIsNone(config._get_config_value('bar'))
def test_parse(self):
    """parse() populates the internal dict from the raw fixture content."""
    config = conf_f.EnvironmentConfig('foo')
    config.parse(self.content)
    self.assertDictEqual({'key1': 'value1', 'key2': 'value2'},
                         config._config_dict)
def test_remote_path(self):
    """The remote_path getter joins the assigned directory with the file name."""
    config = conf_f.EnvironmentConfig('foo')
    config.remote_path = '/bar'
    # 'foo' (the config name) is appended to the directory set above.
    self.assertEqual('/bar/foo', config.remote_path)