def execute(self):
    """Run the job lifecycle: prepare the nagini working directory,
    configure, optionally skip when all output targets already exist,
    execute ``run()``, export env vars to props, verify output targets,
    and dispatch the ``on_success`` / ``on_failure`` hooks.

    Re-raises whatever ``run()`` (or a hook) raised after ``on_failure``.
    """
    # Dedicated scratch dir for nagini bookkeeping under the working dir.
    props["working.dir.nagini"] = join(props["working.dir"], "nagini_data")
    try:
        mkdir(props["working.dir.nagini"])
    except OSError:
        # Best effort: the directory most likely already exists.
        pass
    self.configure()
    self.logger.info('Init props:\n' + json.dumps(props, ensure_ascii=False,
                                                 indent=4, sort_keys=True))
    output = flatten(self.output())
    if not output:
        # No declared targets -- nothing to check before running.
        self._check_output_at_start = False
    try:
        self.logger.info("Nagini: start job")
        if self._check_output_at_start and all(t.exists() for t in output):
            self.logger.warning("All targets exists at start "
                                "of the job, skip job...")
        else:
            self.logger.info('Nagini: about to execute "run" method')
            self.run()
        # .items() works on both Python 2 and 3; .iteritems() is py2-only.
        for key, value in self.env.items():
            props["env.%s" % key] = value
        if self._check_output_at_end and not all(t.exists()
                                                 for t in output):
            raise Exception("Not all output target exists "
                            "at end of the job")
        else:
            self.on_success()
    except BaseException:
        # logger.exception records the traceback, unlike logger.error.
        self.logger.exception("NaginiJob: catch exception. Try on_failure()")
        self.on_failure()
        raise
def execute(self):
    """Run the job lifecycle: prepare the nagini working directory,
    configure, optionally skip when all output targets already exist,
    execute ``run()``, export env vars to props, verify output targets,
    and dispatch the ``on_success`` / ``on_failure`` hooks.

    Re-raises the active exception (with its original traceback via
    ``reraise``) after ``on_failure`` has run.
    """
    # Dedicated scratch dir for nagini bookkeeping under the working dir.
    props['working.dir.nagini'] = join(props['working.dir'], 'nagini_data')
    try:
        mkdir(props['working.dir.nagini'])
    except OSError:
        # Best effort: the directory most likely already exists.
        pass
    self.configure()
    self.logger.info(self.get_props_formatter().render())
    output = flatten(self.output())
    if not output:
        # No declared targets -- nothing to check before running.
        self._check_output_at_start = False
    try:
        self.logger.info('Nagini: start job')
        if self._check_output_at_start and all(t.exists() for t in output):
            self.logger.warning(
                'All targets exists at start of the job, skip job...')
        else:
            self.logger.info('Nagini: about to execute "run" method')
            self.run()
        for key, value in iteritems(self.env):
            props['env.%s' % key] = value
        if self._check_output_at_end and not all(t.exists()
                                                 for t in output):
            raise Exception(
                'Not all output target exists at end of the job')
        else:
            self.on_success()
    # Explicit BaseException instead of a bare ``except:`` -- identical
    # coverage (including KeyboardInterrupt/SystemExit), but intentional
    # and lint-clean; the exception is always re-raised below.
    except BaseException:
        self.logger.exception(
            'NaginiJob: catch exception. Try on_failure()')
        self.on_failure()
        reraise(*sys.exc_info())
def _get_start_jobs(root):
    """Yield the leaf jobs of *root*'s dependency tree.

    Walks ``requires()`` recursively; a job with no requirements is a
    start job and is yielded itself.
    """
    parents = flatten(root.requires())
    if not parents:
        # No dependencies: root itself is a start job.
        yield root
        return
    for parent in parents:
        for leaf in BaseFlow._get_start_jobs(parent):
            yield leaf
def fill_queue(job, queue=None):
    """Collect *job* and all of its transitive requirements depth-first.

    Returns the accumulated jobs in reversed order, so that dependencies
    appear before the jobs that depend on them.
    """
    queue = [] if queue is None else queue
    queue.append(job)
    parents = flatten(job.requires())
    if parents:
        for parent in parents:
            fill_queue(parent, queue)
    return reversed(queue)
def build_dependencies(self):
    """Instantiate this wrapper's job class and build a wrapper for every
    requirement it declares, appending each to ``self.dependencies``."""
    try:
        job = self.class_obj()
    except TypeError:
        # Class cannot be constructed without arguments -- nothing to build.
        return
    for require in flatten(job.requires()):
        # NOTE: check order matters -- EmbeddedFlow before the more
        # general BaseFlow, then plain jobs.
        if isinstance(require, EmbeddedFlow):
            dep = EmbeddedFlowWrapper(require.__class__, self.project)
        elif isinstance(require, BaseFlow):
            dep = EmbeddedFlowWrapper(require.__class__, self.project)
            dep._build_deps = False
        elif isinstance(require, BaseJob):
            dep = JobWrapper(require.__class__, self.project)
        else:
            # Unknown requirement kind -- silently skipped.
            continue
        dep.flow = self.flow
        dep.build()
        self.dependencies.append(dep)
def on_failure(self):
    """Clean up every output target that was already created, so a failed
    run does not leave partial results behind."""
    created = (t for t in flatten(self.output()) if t.exists())
    for target in created:
        target.clean_up()