def core_site_xml_defaults(workdir, node_info):
    """Return the default core-site.xml settings for this deployment.

    Starts from the generic hadoop defaults produced by
    ``hcah.core_site_xml_defaults`` and overrides the temp directory to
    live under the work directory.
    """
    defaults = hcah.core_site_xml_defaults(workdir, node_info)
    # '$workdir' is a template placeholder, expanded later by the config writer
    defaults['hadoop.tmp.dir'] = '$workdir/tmp'
    return defaults
# Example 2
 def test_core_site_xml_defaults(self):
     """core_site_xml_defaults should produce 8 tuned settings for a one-node host."""
     node_info = dict(
         fqdn='hosty.domain.be', network='ib0', pid=1234, cores=4,
         totalcores=24, usablecores=[0, 1, 2, 3], num_nodes=1,
         memory=dict(meminfo=dict(memtotal=68719476736)))
     # fake the filesystem block size so io.file.buffer.size is deterministic
     fake_statvfs = MagicMock(f_bsize=4194304)
     with patch('os.statvfs', return_value=fake_statvfs):
         defaults = hca.core_site_xml_defaults('/', node_info)
     self.assertEqual(len(defaults), 8)
     expected = {
         'fs.inmemory.size.mb': 200,
         'io.file.buffer.size': 4194304,
         'io.sort.factor': 64,
         'io.sort.mb': 256,
     }
     for key, value in expected.items():
         self.assertEqual(defaults[key], value)
 def test_core_site_xml_defaults(self):
     """Verify the merged core-site.xml defaults for a single-node setup."""
     memory_cfg = dict(meminfo=dict(memtotal=68719476736))
     host = dict(
         fqdn='hosty.domain.be', network='ib0', pid=1234, cores=4,
         totalcores=24, usablecores=[0, 1, 2, 3], num_nodes=1,
         memory=memory_cfg)
     # os.statvfs is patched so the buffer-size calculation is reproducible
     with patch('os.statvfs', return_value=MagicMock(f_bsize=4194304)):
         result = hca.core_site_xml_defaults('/', host)
     self.assertEqual(len(result), 8)
     self.assertEqual(result['fs.inmemory.size.mb'], 200)
     self.assertEqual(result['io.file.buffer.size'], 4194304)
     self.assertEqual(result['io.sort.factor'], 64)
     self.assertEqual(result['io.sort.mb'], 256)
def core_site_xml_defaults(workdir, node_info):
    """Build core-site.xml defaults: base hadoop values plus local overrides.

    The base settings come from ``hcah.core_site_xml_defaults``; the
    temp-dir override ('$workdir' placeholder, substituted later) wins.
    """
    overrides = {'hadoop.tmp.dir': '$workdir/tmp'}
    merged = hcah.core_site_xml_defaults(workdir, node_info)
    merged.update(overrides)
    return merged