def _enrich_object_sotre_memory(self, sc, object_store_memory):
    """Normalize the object-store memory setting to a byte count.

    :param sc: SparkContext, used to decide local vs. cluster mode.
    :param object_store_memory: user-supplied size string (e.g. "10g");
        must not be set when running on spark local.
    :return: size in bytes, or None when no value was supplied in
        cluster mode.
    :raises ValueError: if object_store_memory is set in spark local mode.
    """
    if is_local(sc):
        # `assert` is stripped under `python -O`, so the original check
        # could silently ignore a user-supplied value; validate explicitly.
        if object_store_memory:
            raise ValueError(
                "you should not set object_store_memory on spark local")
        # In local mode the plasma store size is derived from this machine.
        return resourceToBytes(self._get_ray_plasma_memory_local())
    else:
        return resourceToBytes(
            str(object_store_memory)) if object_store_memory else None
def _enrich_object_sotre_memory(self, sc, object_store_memory):
    """Resolve the object-store memory setting to a byte count.

    On spark local the cached ``self.object_store_memory`` is used,
    computed lazily from the local machine when absent.  In cluster
    mode the supplied string is converted, or None is returned when
    no value was given.
    """
    if not is_local(sc):
        # Cluster mode: convert the caller's value, if any.
        if not object_store_memory:
            return None
        return resourceToBytes(str(object_store_memory))
    # Local mode: lazily fill the cached setting from the machine.
    if self.object_store_memory is None:
        self.object_store_memory = self._get_ray_plasma_memory_local()
    return resourceToBytes(self.object_store_memory)
def _start_driver(self, object_store_memory="10g"):
    """Start the Ray driver-side processes and connect this process.

    :param object_store_memory: plasma store size string for the
        restricted worker launched on the driver node (cluster mode only).
    """
    # NOTE(review): this says "local" even in cluster mode — confirm intent.
    print("Start to launch ray on local")
    import ray
    if not self.is_local:
        # Cluster mode: run a size-restricted worker alongside the driver.
        self._start_restricted_worker(self.redis_address, self.ray_service.password, object_store_memory=resourceToBytes(object_store_memory))
    # Drop any previous Ray session before connecting to this cluster.
    ray.shutdown()
    ray.init(redis_address=self.redis_address, redis_password=self.ray_service.password)
def _get_ray_driver_memory(self):
    """Return the driver memory budget.

    :return: memory in bytes — total physical RAM when the Spark master
        is local, otherwise the configured ``spark.driver.memory``.
    """
    if "local" not in self.sc.master:
        return resourceToBytes(self.sc._conf.get("spark.driver.memory"))
    from psutil import virtual_memory
    # psutil already reports total physical memory in bytes.
    return int(virtual_memory().total)
def _start_driver(self, object_store_memory, num_cores=0, labels="",
                  extra_params=None):
    """Start the Ray driver-side processes and connect this process.

    :param object_store_memory: plasma store size string for the
        restricted worker launched on the driver node (cluster mode only).
    :param num_cores: number of cores reserved for the restricted worker.
    :param labels: label string forwarded to the restricted worker.
    :param extra_params: optional dict of extra `ray start` parameters;
        defaults to an empty dict.
    """
    # A mutable default ({}) is shared across calls and can leak state
    # between invocations; use the None-sentinel idiom instead.
    if extra_params is None:
        extra_params = {}
    print("Start to launch ray on local")
    import ray
    if not self.is_local:
        # Cluster mode: run a size-restricted worker alongside the driver.
        self._start_restricted_worker(
            object_store_memory=resourceToBytes(object_store_memory),
            num_cores=num_cores,
            labels=labels,
            extra_params=extra_params)
    # Drop any previous Ray session before connecting to this cluster.
    ray.shutdown()
    ray.init(redis_address=self.redis_address,
             redis_password=self.ray_service.password)
def test_resource_to_bytes(self):
    """resourceToBytes converts b/k/m/g-suffixed strings to byte counts."""
    expected = {
        "10b": 10,
        "10k": 10000,
        "10m": 10000000,
        "10g": 10000000000,
    }
    for spec, nbytes in expected.items():
        assert rutils.resourceToBytes(spec) == nbytes