def update_events(events, conn, keys=None):
    """Persist the given event objects, updating the listed columns.

    Args:
        events: iterable of event objects (each must expose 'event_id').
        conn: open database connection, passed through to spsqlutils.
        keys: column names to update. Defaults to ['status'].
              TODO: maybe add 'last_mod_date' to the default set.

    Raises:
        SPException: SPEVEDAO-001 when an event row is not found.
    """
    if keys is None:
        # default resolved here rather than in the signature, to avoid a
        # shared mutable default argument
        keys = ['status']
    for e in events:
        rowcount = spsqlutils.update(e, keys, conn)
        # a zero rowcount means the row does not exist (not merely unchanged)
        if rowcount == 0:
            raise SPException("SPEVEDAO-001", "event not found (event_id=%s)" % e.event_id)
def add(self, *states):
    """Register one or more states, keyed by their 'source' attribute.

    A state flagged 'initial' is recorded as self.init_state (the last
    such state wins). Raises SPException on a duplicate source key; any
    states added before the duplicate remain registered.
    """
    for st in states:
        is_duplicate = st.source in self.states
        if is_duplicate:
            # refuse duplicates
            raise SPException()
        self.states[st.source] = st
        if st.initial:
            self.init_state = st
def get_pipeline(name):
    """Return a deep copy of the pipeline named 'name'.

    The pipeline definition module is loaded from the pipeline folder and
    cached in the module-level 'pipelines' dict on first use; callers get a
    private deep copy so they may mutate it freely.

    Raises:
        SPException: SPPPIPEL-001 when the definition file does not exist.
    """
    if name not in pipelines:
        pipeline_definition_file = '%s/%s.py' % (spconfig.pipeline_folder, name)
        if not os.path.exists(pipeline_definition_file):
            raise SPException(
                "SPPPIPEL-001",
                "Pipeline definition file not found (%s)" % pipeline_definition_file)
        try:
            # prevent a .pyc file from being written next to the definition
            # file (same guard as get_module uses for the dependency file)
            sys.dont_write_bytecode = True
            pipeline_definition_module = imp.load_source(name, pipeline_definition_file)
        finally:
            sys.dont_write_bytecode = False
        pipelines[name] = pipeline_definition_module.get_pipeline()
    return copy.deepcopy(pipelines[name])
def log(code, message, level, stdout, stderr, logfile):
    """Dispatch a log message to the selected destinations.

    Args:
        code: 12-character event code (the first 8 characters identify the
              emitting module).
        message: text to log.
        level: numeric level; the message is emitted only when it is at or
               above get_verbosity_level().
        stdout, stderr, logfile: booleans selecting the destinations.

    Raises:
        SPException: SYNDALOG-002 when 'code' is not exactly 12 characters.
    """
    # check code length
    if len(code) != 12:
        raise SPException("SYNDALOG-002", "%s has an incorrect length" % code)
    if level >= get_verbosity_level():
        if stdout:
            sptools.print_stdout(message)
        if stderr:
            sptools.print_stderr(message)
        if logfile:
            logger.log(level, message, extra={'code': code})
def get_module():
    """Load and return the 'spbindings' pipeline dependency module.

    Bytecode writing is disabled for the duration of the load so that no
    .pyc file is left next to the dependency file.

    Raises:
        SPException: SPPIPDEP-001 when the dependency file is missing.
    """
    module_name = 'spbindings'
    module_path = '%s/%s.py' % (spconfig.pipeline_folder, module_name)
    if not os.path.exists(module_path):
        raise SPException("SPPIPDEP-001",
                          "Pipeline dependency file not found (%s)" % module_path)
    sys.dont_write_bytecode = True
    try:
        loaded_module = imp.load_source(module_name, module_path)
    finally:
        sys.dont_write_bytecode = False
    return loaded_module
def add_ppprun(pipeline, status, project, model, dataset_pattern, variable, conn):
    """Add a new pipeline run, or restart an existing matching run.

    If a run already exists for (pipeline, dataset_pattern, variable), it is
    restarted only when its status is PAUSE or DONE; any other status raises
    PipelineRunningException. If no run exists, a new one is built and
    inserted, and the creation is logged.

    Raises:
        SPException: SPPOSTPR-440 when the DAO returns other than exactly one
            matching run.
        PipelineRunningException: when the existing run's status does not
            allow a restart.
    """
    if spppprdao.exists_ppprun(
            PPPRun(pipeline=pipeline, dataset_pattern=dataset_pattern,
                   variable=variable), conn):
        # retrieve the existing run(s) from db
        pppruns = spppprdao.get_pppruns(order='fifo', pipeline=pipeline,
                                        dataset_pattern=dataset_pattern,
                                        variable=variable, conn=conn)
        if len(pppruns) != 1:
            # exists_ppprun said yes, so anything but exactly one row is a
            # database inconsistency
            raise SPException(
                "SPPOSTPR-440",
                "Incorrect number of runs (number_of_runs=%i,pipeline=%s,dataset_pattern=%s,variable=%s)"
                % (len(pppruns), pipeline, dataset_pattern, variable))
        else:
            ppprun = pppruns[0]
            if ppprun.status in [
                    spconst.PPPRUN_STATUS_PAUSE, spconst.PPPRUN_STATUS_DONE
            ]:
                # Only a paused or done run may be restarted: this prevents
                # resetting a running pipeline. 'waiting' is deliberately not
                # accepted, to avoid a race with a job that may start while we
                # are here => TBC.
                restart_pipeline(ppprun, status, conn)
            else:
                raise PipelineRunningException()
    else:
        ppprun = build_ppprun(pipeline, status, project, model, dataset_pattern,
                              variable)
        # autoincrement field is stored in 'id_'. Not used for now.
        id_ = spppprdao.add_ppprun(ppprun, conn)
        splog.info(
            'SPPOSTPR-052',
            'New pipeline added (%s,%s,%s,%s,%s,%s)' %
            (pipeline, status, project, model, dataset_pattern, variable))
def check_path(path):
    """Raise SPException (SPCONFIG-101) when 'path' does not exist on disk."""
    path_exists = os.path.exists(path)
    if not path_exists:
        raise SPException("SPCONFIG-101", "Path not found (%s)" % path)
def check_path(path):
    # Raise SPException when 'path' does not exist on disk.
    # NOTE(review): duplicates the identical helper defined earlier in this
    # file -- looks like a copy/paste leftover; confirm before removing.
    if not os.path.exists(path):
        raise SPException("SPCONFIG-101", "Path not found (%s)" % path)

# init module.

# Mask the others-write bit on newly created files/folders
# (NOTE: '0002' is a Python 2 octal literal).
os.umask(0002)

# Hard-coded layout switch: when False (current value), all folders are
# resolved from the SP_HOME environment variable; the 'else' branch below
# holds the system-package layout and is currently dead code.
system_pkg_install = False

if not system_pkg_install:
    # source/tarball install: everything lives under $SP_HOME
    if 'SP_HOME' not in os.environ:
        raise SPException('SPCONFIG-010', "'SP_HOME' not set")
    root_folder = os.environ['SP_HOME']
    tmp_folder = "%s/tmp" % root_folder
    log_folder = "%s/log" % root_folder
    conf_folder = "%s/conf" % root_folder
else:
    # system package install: FHS-style fixed locations
    root_folder = '/usr/share/python/synda/sdp'
    tmp_folder = '/var/tmp/synda/sdp'
    log_folder = '/var/log/synda/sdp'
    conf_folder = '/etc/synda/sdp'

bin_folder = "%s/bin" % root_folder
default_pipeline_folder = '%s/pipeline' % conf_folder