def test_available_memory_one_third(self):
    """available_memory scales with the fraction of the node's cores claimed.

    8/24, 12/24 and 1/24 cores yield 1/3, 1/2 and 1/24 of the
    total memory minus what parse_memory('8g') reserves.
    """
    total_mem = 68719476736
    node = {
        'fqdn': 'hosty.domain.be',
        'network': 'ib0',
        'pid': 1234,
        'cores': 8,
        'totalcores': 24,
        'usablecores': range(24),
        'num_nodes': 1,
        'memory': {'meminfo': {'memtotal': total_mem}, 'ulimit': 'unlimited'},
    }
    # Memory left after the '8g' reservation is subtracted.
    usable = total_mem - hcc.parse_memory('8g')
    self.assertEqual(hcc.available_memory(node), int(usable * 1. / 3))
    node['cores'] = 12
    self.assertEqual(hcc.available_memory(node), int(usable * 1. / 2))
    node['cores'] = 1
    self.assertEqual(hcc.available_memory(node), int(usable * 1. / 24))
def test_available_memory_entire_machine(self):
    """Claiming every core (24/24) yields the full post-reservation memory."""
    total_mem = 68719476736
    node = {
        'fqdn': 'hosty.domain.be',
        'network': 'ib0',
        'pid': 1234,
        'cores': 24,
        'totalcores': 24,
        'usablecores': range(24),
        'num_nodes': 1,
        'memory': {'meminfo': {'memtotal': total_mem}, 'ulimit': 'unlimited'},
    }
    # No per-core scaling expected: all cores are claimed.
    expected = total_mem - hcc.parse_memory('8g')
    self.assertEqual(hcc.available_memory(node), expected)
def test_available_memory_one_third(self):
    """available_memory scales with the claimed fraction of the node's cores."""
    total_mem = 68719476736
    node = dict(
        fqdn='hosty.domain.be', network='ib0', pid=1234, cores=8,
        totalcores=24, usablecores=range(24), num_nodes=1,
        memory=dict(meminfo=dict(memtotal=total_mem), ulimit='unlimited'))
    # Memory remaining once the '8g' reservation is taken off the total.
    after_reserve = total_mem - hcc.parse_memory('8g')
    # cores -> expected divisor of the post-reservation memory.
    for ncores, denom in ((8, 3), (12, 2), (1, 24)):
        node['cores'] = ncores
        self.assertEqual(hcc.available_memory(node),
                         int(after_reserve * 1. / denom))
def memory_defaults(node_info):
    """
    Return default memory information as a MemDefaults tuple.

    Derives, from the node's core count and its available Hadoop memory:
    the minimum container size, the number of containers (capped at
    2 * cores), and the RAM granted to each container.

    node_info -- dict with at least a "cores" entry; also consumed by
                 available_memory (full schema defined by the caller).
    """
    ncores = node_info["cores"]
    hadoop_memory = available_memory(node_info)
    min_container_sz = min_container_size(hadoop_memory)
    # Use floor division: container counts and sizes are whole numbers.
    # Plain '/' would produce floats under Python 3 (identical result to
    # '//' for ints under Python 2).
    num_containers = min(2 * ncores, hadoop_memory // min_container_sz)
    ram_per_container = max(min_container_sz, hadoop_memory // num_containers)
    return MemDefaults(hadoop_memory, min_container_sz, num_containers, ram_per_container)
def memory_defaults(node_info):
    '''
    Return default memory information as a MemDefaults tuple.

    Derives, from the node's core count and its available Hadoop memory:
    the minimum container size, the number of containers (capped at
    2 * cores), and the RAM granted to each container.

    node_info -- dict with at least a 'cores' entry; also consumed by
                 available_memory (full schema defined by the caller).
    '''
    ncores = node_info['cores']
    hadoop_memory = available_memory(node_info)
    min_container_sz = min_container_size(hadoop_memory)
    # Use floor division: container counts and sizes are whole numbers.
    # Plain '/' would produce floats under Python 3 (identical result to
    # '//' for ints under Python 2).
    num_containers = min(2 * ncores, hadoop_memory // min_container_sz)
    ram_per_container = max(min_container_sz, hadoop_memory // num_containers)
    return MemDefaults(hadoop_memory, min_container_sz, num_containers, ram_per_container)
def test_available_memory_ulimit(self):
    """A numeric ulimit value caps available_memory at exactly that number."""
    total_mem = 68719476736
    node = dict(
        fqdn='hosty.domain.be', network='ib0', pid=1234, cores=8,
        totalcores=24, usablecores=range(24), num_nodes=1,
        memory=dict(meminfo=dict(memtotal=total_mem), ulimit='12345'))
    # ulimit wins over the meminfo-derived value.
    self.assertEqual(hcc.available_memory(node), 12345)