def __init__(self, _proj: str, _exp: str, appConf: Dict[str, str] = None):
    """Initialize the distributed test manager and connect to the Dask cluster.

    Starts a ProcessManager from the merged EdasEnv configuration, logs the
    scheduler/worker topology, and optionally spawns a metrics-tracking thread.

    Args:
        _proj: Project identifier, forwarded to the base class.
        _exp: Experiment identifier, forwarded to the base class.
        appConf: Optional configuration mapping merged into EdasEnv.
    """
    super(DistributedTestManager, self).__init__(_proj, _exp)
    # BUG FIX: appConf defaults to None, but .get() below was called on it
    # unconditionally, raising AttributeError for the no-config constructor.
    if appConf is None:
        appConf = {}
    EdasEnv.update(appConf)
    # Config values may arrive as strings (elsewhere in this file "true" is
    # passed); interpret explicitly instead of relying on truthiness, where
    # the string "false" would count as enabled.
    log_metrics = str(appConf.get("log_metrics", False)).lower() in ("true", "1", "yes")
    self.processManager = ProcessManager.initManager(EdasEnv.parms)
    time.sleep(10)  # give the scheduler/workers time to come up before querying
    self.processing = False
    self.scheduler_info = self.processManager.client.scheduler_info()
    self.workers: Dict = self.scheduler_info.pop("workers")
    self.logger.info(" @@@@@@@ SCHEDULER INFO: " + str(self.scheduler_info))
    self.logger.info(f" N Workers: {len(self.workers)} ")
    for addr, specs in self.workers.items():
        self.logger.info(f" -----> Worker {addr}: {specs}")
    if log_metrics:
        # Daemonless background thread; trackCwtMetrics is defined on the class.
        self.metricsThread = Thread(target=self.trackCwtMetrics)
        self.metricsThread.start()
# Test request definition: one variable ("t") drawn from the collection,
# averaged over the time axis.
variable = "t"
scheduler = "127.0.0.1:8786"
local = True
domains = [{"name": "d0"}]
variables = [
    {"uri": f"collection://{collection}:", "name": f"{variable}:v0", "domain": "d0"}
]
operations = [{"name": "xarray.ave", "input": "v0", "axes": "t"}]

if __name__ == '__main__':
    print("Running test")
    appConf = {"sources.allowed": "collection,https", "log.metrics": "true"}
    EdasEnv.update(appConf)
    # Three connection modes: in-process cluster, self-started distributed
    # cluster, or attach to an already-running scheduler.
    if local:
        print("Initializing Local Dask cluster")
        client = Client()
    elif scheduler is None:
        cluster = EDASCluster()
        print("Initializing Dask-distributed cluster with scheduler address: " + cluster.scheduler_address)
        client = Client(cluster.scheduler_address, timeout=64)
        time.sleep(20)
    else:
        print("Initializing client with existing scheduler at: " + scheduler)
def __init__(self, _proj: str, _exp: str, appConf: Dict[str, str] = None):
    """Set up the local test manager.

    Forwards project/experiment identifiers to the base class, merges the
    supplied configuration into EdasEnv, and starts a ProcessManager from
    the resulting parameters.
    """
    super(LocalTestManager, self).__init__(_proj, _exp)
    EdasEnv.update(appConf)
    self.processManager = ProcessManager.initManager(EdasEnv.parms)
def __init__(self, _proj: str, _exp: str, appConf: Dict[str, str] = None):
    """Initialize base test-manager state and merge the supplied configuration.

    Unlike the sibling manager, no ProcessManager is started here.
    """
    super(LocalTestManager, self).__init__(_proj, _exp)
    EdasEnv.update(appConf)