def __init__(self, client_address: str = None, request_port: int = None, response_port: int = None):
    """Boot the EDAS application server.

    Resolves the ZMQ endpoint settings (explicit arguments win over the
    EdasEnv configuration defaults), starts the cluster process manager,
    logs the dask scheduler's worker inventory, and — unless disabled via
    the "log.cwt.metrics" parameter — starts the CWT metrics tracker thread.
    """
    super(EDASapp, self).__init__(
        get_or_else(client_address, EdasEnv.get("wps.server.address", "*")),
        get_or_else(request_port, EdasEnv.get("request.port", 4556)),
        get_or_else(response_port, EdasEnv.get("response.port", 4557)))
    self.process = "edas"
    self.processManager = None
    # Ensure the cluster is torn down when the interpreter exits.
    atexit.register(self.term, "ShutdownHook Called")
    self.logger.info("STARTUP CLUSTER")
    self.processManager = ProcessManager.initManager(EdasEnv.parms)
    self.scheduler_info = self.processManager.client.scheduler_info()
    # Pull the (potentially large) worker table out before logging the rest.
    worker_map: Dict = self.scheduler_info.pop("workers")
    self.logger.info(" @@@@@@@ SCHEDULER INFO: " + str(self.scheduler_info))
    self.logger.info(f" N Workers: {len(worker_map)} ")
    for worker_address, worker_spec in worker_map.items():
        self.logger.info(f" -----> Worker {worker_address}: {worker_spec}")
    metrics_enabled = EdasEnv.parms.get("log.cwt.metrics", True)
    if metrics_enabled:
        self.metricsThread = Thread(target=self.trackCwtMetrics)
        self.metricsThread.start()
def processUtilNode(self, node: WorkflowNode) -> EDASDataset:
    """Execute a utility workflow node.

    Only "edas.metrics" is recognized: it packages the process manager's
    CWT metrics as a metadata-only EDASDataset. Any other node name falls
    through and yields None, exactly as the original implicit return did.
    """
    from edas.process.manager import ProcessManager
    if node.name.lower() != "edas.metrics":
        return None  # unrecognized utility node
    manager = ProcessManager.getManager()
    metrics = manager.getCWTMetrics()
    metrics["@ResultClass"] = "METADATA"
    metrics["@ResultType"] = "METRICS"
    return EDASDataset(OrderedDict(), metrics)
def __init__(self, _proj: str, _exp: str, appConf: Dict[str, str] = None):
    """Distributed test manager: boots the cluster process manager, waits for
    the dask scheduler, records worker info, and optionally tracks CWT metrics.

    BUG FIX: ``appConf`` defaults to None, but the original called
    ``appConf.get("log_metrics", False)`` unconditionally, raising
    AttributeError whenever no configuration dict was supplied.
    """
    super(DistributedTestManager, self).__init__(_proj, _exp)
    # NOTE(review): passed through unchanged — presumably EdasEnv.update
    # tolerates None; confirm against its implementation.
    EdasEnv.update(appConf)
    log_metrics = appConf.get("log_metrics", False) if appConf is not None else False
    self.processManager = ProcessManager.initManager(EdasEnv.parms)
    time.sleep(10)  # give the cluster time to come up before querying it
    self.processing = False
    self.scheduler_info = self.processManager.client.scheduler_info()
    self.workers: Dict = self.scheduler_info.pop("workers")
    self.logger.info(" @@@@@@@ SCHEDULER INFO: " + str(self.scheduler_info))
    self.logger.info(f" N Workers: {len(self.workers)} ")
    for addr, specs in self.workers.items():
        self.logger.info(f" -----> Worker {addr}: {specs}")
    if log_metrics:
        self.metricsThread = Thread(target=self.trackCwtMetrics)
        self.metricsThread.start()
def __init__(self, _project: str, _experiment: str, appConfiguration: Dict[str, str]):
    """Record the project/experiment identity and create the process manager
    from the supplied application configuration."""
    # Independent assignments — grouped by role rather than original order.
    self.project = _project
    self.experiment = _experiment
    self.logger = EDASLogger.getLogger()
    self.processManager = ProcessManager(appConfiguration)
class AppTests:
    """Driver for submitting EDAS test workflows and plotting their results."""

    def __init__(self, _project: str, _experiment: str, appConfiguration: Dict[str, str]):
        """Store the project/experiment identity and build the process manager."""
        self.logger = EDASLogger.getLogger()
        self.project = _project
        self.experiment = _experiment
        self.processManager = ProcessManager(appConfiguration)

    def exec(self, name, domains: List[Dict[str, Any]], variables: List[Dict[str, Any]], operations: List[Dict[str, Any]]) -> Response:
        """Assemble a Job from the request fragments and run it under *name*."""
        job1 = Job.init(self.project, self.experiment, name, domains, variables, operations)
        return self.runJob(job1)

    def runJob(self, job: Job, clientId: str = "local") -> Response:
        """Submit *job*; failures are logged and returned as a Message, never raised."""
        try:
            resultHandler = ExecHandler("local", job.process, workers=job.workers)
            self.processManager.submitProcess(job.process, job, resultHandler)
            return Message(clientId, job.process, resultHandler.filePath)
        except Exception as err:
            self.logger.error("Caught execution error: " + str(err))
            traceback.print_exc()
            return Message(clientId, job.process, str(err))

    def plot(self, filePath: str):
        """Plot every data variable in the dataset at *filePath*, one subplot each.

        Plot errors are logged, not raised (best-effort display).
        """
        try:
            dset = xa.open_dataset(filePath)
            # Renamed from 'vars' to avoid shadowing the builtin.
            data_vars = list(dset.data_vars.values())
            nplots = len(data_vars)
            fig, axes = plt.subplots(ncols=nplots)
            self.logger.info("Plotting {} plots ".format(nplots))
            if nplots == 1:
                # subplots(ncols=1) returns a single Axes, not an array.
                data_vars[0].plot(ax=axes)
            else:
                for iaxis, result in enumerate(data_vars):
                    result.plot(ax=axes[iaxis])
            plt.show()
        except Exception as err:
            self.logger.error("Error Plotting: {} ".format(str(err)))

    def test_detrend(self):
        """Decycle, xy-normalize, then point-subset a MERRA2 tas timeseries."""
        domains = [{"name": "d0",
                    "lat": {"start": 0, "end": 50, "system": "values"},
                    "lon": {"start": 0, "end": 50, "system": "values"},
                    "time": {"start": '1990-01-01', "end": '2000-01-01', "system": "values"}},
                   {"name": "d1",
                    "lat": {"start": 20, "end": 20, "system": "values"},
                    "lon": {"start": 20, "end": 20, "system": "values"}}]
        variables = [{"uri": TestDataManager.getAddress("merra2", "tas"), "name": "tas:v0", "domain": "d0"}]
        operations = [{"name": "edas.decycle", "input": "v0", "result": "dc"},
                      {"name": "edas.norm", "axis": "xy", "input": "dc", "result": "dt"},
                      {"name": "edas.subset", "input": "dt", "domain": "d1"}]
        return self.exec("test_detrend", domains, variables, operations)

    def test_norm(self):
        """Normalize MERRA2 tas over the xy axes."""
        domains = [{"name": "d0",
                    "lat": {"start": 20, "end": 40, "system": "values"},
                    "lon": {"start": 60, "end": 100, "system": "values"}}]
        variables = [{"uri": TestDataManager.getAddress("merra2", "tas"), "name": "tas:v0", "domain": "d0"}]
        operations = [{"name": "edas.norm", "axis": "xy", "input": "v0"}]
        # BUG FIX: the job was previously submitted under the name
        # "test_detrend" (copy-paste from the method above).
        return self.exec("test_norm", domains, variables, operations)

    def plotPerformanceXa(self, filePath: str):
        """Poll until *filePath* exists, then render the performance plot."""
        while True:
            if os.path.isfile(filePath):
                dset = xa.open_dataset(filePath)
                plotter.plotPerformanceXa(dset, "20crv-ts")
                print("EXITING PLOT LOOP")
                return
            else:
                time.sleep(0.5)
                print(".", end='')

    def test_monsoon_learning(self):
        """Train a small Keras network to predict the monsoon AI index from 20CRv PCs."""
        domains = [{"name": "d0",
                    "time": {"start": '1880-01-01T00', "end": '2005-01-01T00', "system": "values"}}]
        variables = [{"uri": "archive:pcs-20crv-ts-TN", "name": "pcs:v0", "domain": "d0"},
                     {"uri": "archive:IITM/monsoon/timeseries", "name": "AI:v1", "domain": "d0", "offset": "1y"}]
        operations = [{"name": "edas.filter", "input": "v0", "result": "v0f", "axis": "t", "sel": "aug"},
                      {"name": "keras.layer", "input": "v0f", "result": "L0", "axis": "m", "units": 64, "activation": "relu"},
                      {"name": "keras.layer", "input": "L0", "result": "L1", "units": 1, "activation": "linear"},
                      {"name": "edas.norm", "input": "v1", "axis": "t", "result": "dc"},
                      {"name": "edas.detrend", "input": "dc", "axis": "t", "wsize": 50, "result": "t1"},
                      {"name": "keras.train", "axis": "t", "input": "L1,t1", "lr": 0.002, "vf": 0.2,
                       "decay": 0.002, "momentum": 0.9, "epochs": 1000, "batch": 200, "iterations": 50,
                       "target": "t1", "archive": "model-20crv-ts"}]
        return self.exec("test_monsoon_learning", domains, variables, operations)
def init(self):
    """Start the process manager and capture the dask scheduler's state."""
    self.processManager = ProcessManager.initManager(EdasEnv.parms)
    info = self.processManager.client.scheduler_info()
    self.scheduler_info = info
    self.logger.info(f" \n @@@@@@@ SCHEDULER INFO:\n {self.scheduler_info}")
def __init__(self, _proj: str, _exp: str, appConf: Dict[str, str] = None):
    """Local (single-process) test manager.

    Pushes the supplied configuration into the global EdasEnv, then builds
    the process manager from the merged parameter set.
    """
    super(LocalTestManager, self).__init__(_proj, _exp)
    # NOTE(review): appConf may be None here — presumably EdasEnv.update
    # tolerates that; confirm against its implementation.
    EdasEnv.update(appConf)
    self.processManager = ProcessManager.initManager(EdasEnv.parms)
from edas.workflow.modules.edas import WorldClimKernel import xarray as xa import numpy as np import time import matplotlib import matplotlib.pyplot as plt from edas.workflow.data import EDASArray from edas.process.test import LocalTestManager from edas.process.manager import ProcessManager, ExecHandler from edas.config import EdasEnv appConf = {"sources.allowed": "collection,https", "log.metrics": "true"} EdasEnv.update(appConf) processManager = ProcessManager.initManager(EdasEnv.parms) kernel = WorldClimKernel() ds_tmin = xa.open_dataset( "https://dataserver.nccs.nasa.gov/thredds/dodsC/bypass/CREATE-IP/reanalysis/MERRA2/mon/atmos/tasmin.ncml" ) tasmin = ds_tmin["tasmin"] tasmin = EDASArray("tasmin", "d0", tasmin[0:12, :, :].compute()) ds_tmax = xa.open_dataset( "https://dataserver.nccs.nasa.gov/thredds/dodsC/bypass/CREATE-IP/reanalysis/MERRA2/mon/atmos/tasmax.ncml" ) tasmax = ds_tmax["tasmax"] tasmax = EDASArray("tasmax", "d0", tasmax[0:12, :, :].compute()) ds_pr = xa.open_dataset( "https://dataserver.nccs.nasa.gov/thredds/dodsC/bypass/CREATE-IP/reanalysis/MERRA2/mon/atmos/pr.ncml"