    def test_build_defaults(self):
        env.roledefs['coordinator'] = 'a'
        env.roledefs['workers'] = ['b', 'c']
        actual_default = workers.Worker().build_all_defaults()
        expected = {
            'node.properties': {
                'node.environment': 'presto',
                'node.data-dir': '/var/lib/presto/data',
                'node.launcher-log-file': '/var/log/presto/launcher.log',
                'node.server-log-file': '/var/log/presto/server.log',
                'catalog.config-dir': '/etc/presto/catalog'
            },
            'jvm.config': [
                '-server', '-Xmx16G', '-XX:-UseBiasedLocking', '-XX:+UseG1GC',
                '-XX:G1HeapRegionSize=32M', '-XX:+ExplicitGCInvokesConcurrent',
                '-XX:+ExitOnOutOfMemoryError', '-XX:+UseGCOverheadLimit',
                '-XX:+HeapDumpOnOutOfMemoryError',
                '-XX:ReservedCodeCacheSize=512M',
                '-Djdk.attach.allowAttachSelf=true',
                '-Djdk.nio.maxCachedBufferSize=2000000',
                '-DHADOOP_USER_NAME=hive'
            ],
            'config.properties': {
                'coordinator': 'false',
                'discovery.uri': 'http://a:8080',
                'http-server.http.port': '8080',
                'query.max-memory': '50GB',
                'query.max-memory-per-node': '8GB'
            }
        }

        self.assertEqual(actual_default, expected)
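
    # NOTE: the two mock parameters below are presumably injected by @patch
    # decorators on this test (patching the helper that reads the existing
    # Presto configuration files and the helper that writes configuration
    # back out); those decorators are not shown in this excerpt.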
    def test_get_conf(self, get_presto_conf_mock, write_mock):
        env.roledefs['coordinator'] = ['j']
        file_conf = {
            'node.properties': {
                'my-property': 'value',
                'node.environment': 'test'
            }
        }
        get_presto_conf_mock.return_value = file_conf
        expected = {
            'node.properties': {
                'my-property': 'value',
                'node.environment': 'test'
            },
            'jvm.config': [
                '-server', '-Xmx16G', '-XX:-UseBiasedLocking', '-XX:+UseG1GC',
                '-XX:G1HeapRegionSize=32M', '-XX:+ExplicitGCInvokesConcurrent',
                '-XX:+ExitOnOutOfMemoryError', '-XX:+UseGCOverheadLimit',
                '-XX:+HeapDumpOnOutOfMemoryError',
                '-XX:ReservedCodeCacheSize=512M',
                '-Djdk.attach.allowAttachSelf=true',
                '-Djdk.nio.maxCachedBufferSize=2000000',
                '-DHADOOP_USER_NAME=hive'
            ],
            'config.properties': {
                'coordinator': 'false',
                'discovery.uri': 'http://j:8080',
                'http-server.http.port': '8080',
                'query.max-memory': '50GB',
                'query.max-memory-per-node': '8GB'
            }
        }
        self.assertEqual(workers.Worker().get_conf(), expected)
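
    # NOTE: coord_mock, get_conf_mock, and write_mock are presumably injected
    # by @patch decorators (patching the coordinator-role lookup and the
    # config read/write helpers), which are omitted from this excerpt.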
    def test_worker_not_localhost(self, coord_mock, get_conf_mock,
                                  write_mock):
        get_conf_mock.return_value = {}
        coord_mock.return_value = ['localhost']
        env.roledefs['all'] = ['localhost', 'remote-host']
        self.assertRaisesRegexp(
            ConfigurationError,
            'discovery.uri should not be localhost in a multi-node cluster',
            workers.Worker().get_conf)

    def test_validate_default(self):
        env.roledefs['coordinator'] = 'localhost'
        conf = workers.Worker().build_all_defaults()
        self.assertEqual(conf, workers.Worker.validate(conf))
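
    # NOTE: as above, get_conf_mock and write_mock are presumably injected by
    # @patch decorators that are omitted from this excerpt.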
    def test_get_conf_empty_is_default(self, get_conf_mock, write_mock):
        env.roledefs['coordinator'] = ['j']
        get_conf_mock.return_value = {}
        self.assertEqual(workers.Worker().get_conf(),
                         workers.Worker().build_all_defaults())