def from_yaml(data):
    """Build a ChallengeDescription from a parsed YAML mapping.

    Reads the required keys from ``data`` and constructs the challenge
    object, parsing each step via ``ChallengeStep.from_yaml``.

    Raises InvalidChallengeDescription (via ``raise_wrapped``) when a
    required key is missing.
    """
    try:
        # Read keys in a fixed order so the first missing one is reported.
        challenge_name = data['challenge']
        challenge_title = data['title']
        challenge_description = data['description']
        challenge_protocol = data['protocol']
        opens_on = data['date-open']
        closes_on = data['date-close']
        challenge_roles = data['roles']
        challenge_transitions = data['transitions']
        raw_steps = data['steps']

        parsed_steps = {
            step_name: ChallengeStep.from_yaml(challenge_name, step_spec)
            for step_name, step_spec in raw_steps.items()
        }

        return ChallengeDescription(challenge_name, challenge_title,
                                    challenge_description, challenge_protocol,
                                    opens_on, closes_on, parsed_steps,
                                    roles=challenge_roles,
                                    transitions=challenge_transitions)
    except KeyError as e:
        msg = 'Missing config %s' % e
        raise_wrapped(InvalidChallengeDescription, e, msg)
def substitute_dependencies(a, db):
    """Recursively replace compmake ``Promise`` objects inside ``a`` with the
    corresponding job user-objects loaded from ``db``.

    Dicts, lists and tuples are rebuilt element by element; a ``Promise`` is
    resolved through ``get_job_userobject`` and then substituted recursively;
    anything else is deep-copied unchanged.
    """
    from compmake import Promise

    # XXX: this is a workaround
    if type(a).__name__ in ['ObjectSpec']:
        return deepcopy(a)

    if isinstance(a, dict):
        ca = type(a)
        rest = [(k, substitute_dependencies(v, db=db)) for k, v in a.items()]
        try:
            res = ca(rest)
            # print('%s->%s' % (a, str(res)))
            return res
        except BaseException as e:
            # BUG FIX: the original line ended with a trailing comma, which
            # silently made `msg` a 1-tuple instead of a string. Also dropped
            # the redundant TypeError from the except clause (it is already a
            # subclass of BaseException).
            msg = 'Could not instance something looking like a dict.'
            raise_wrapped(Exception, e, msg, ca=ca)
    elif isinstance(a, list):
        # XXX: This fails for subclasses of list
        return type(a)([substitute_dependencies(x, db=db) for x in a])
    elif isinstance(a, tuple):
        # XXX: This fails for subclasses of tuples
        return type(a)([substitute_dependencies(x, db=db) for x in a])
    elif isinstance(a, Promise):
        s = get_job_userobject(a.job_id, db=db)
        return substitute_dependencies(s, db=db)
    else:
        return deepcopy(a)
def bel(self, x):
    # Checks that `x` belongs to the space `self`, wrapping any NotBelongs
    # exception with extra context.
    #
    # NOTE(review): `f` is a free variable here — this function looks like the
    # inner wrapper of a decorator (e.g. ``def decorator(f): ... return bel``)
    # that was extracted without its enclosing scope. Likewise, ``return f`` at
    # the end probably belongs to the enclosing decorator, not to ``bel``
    # itself. Confirm against the original file before relying on this.
    from .space import NotBelongs
    try:
        f(self, x)
    except NotBelongs as e:
        msg = 'Point does not belong to space.'
        raise_wrapped(NotBelongs, e, msg, space=self, x=x, compact=True)
    return f
def __init__(self, dp, fnames, rnames, icon=None):
    """Wrap a PrimitiveDP, attaching names to its function/resource spaces.

    :param dp: the PrimitiveDP to wrap.
    :param fnames: a string (single function) or a list of strings matching
                   the length of the function PosetProduct.
    :param rnames: a string (single resource) or a list of strings matching
                   the length of the resource PosetProduct.
    :param icon: optional icon identifier.

    Raises ValueError (via raise_wrapped) if the names are repeated or do not
    match the structure of the spaces.
    """
    assert isinstance(dp, PrimitiveDP), type(dp)
    self.dp = dp
    F = self.dp.get_fun_space()
    R = self.dp.get_res_space()
    try:
        # assume that dp has product spaces of given length

        # Reject duplicate names up front.
        if isinstance(rnames, list):
            if not len(set(rnames)) == len(rnames):
                raise ValueError('Repeated rnames.')
        if isinstance(fnames, list):
            if not len(set(fnames)) == len(fnames):
                raise ValueError('Repeated fnames.')

        # Function-space names: either a single string, or one name per
        # component of a PosetProduct.
        if isinstance(fnames, str):
            self.F_single = True
            self.Fname = fnames
        else:
            if isinstance(F, PosetProduct):
                if not isinstance(fnames, list) or not len(F) == len(fnames):
                    raise ValueError("F incompatible")
                self.F_single = False
                self.Fnames = fnames
            else:
                # F is not a product, so only a single string name is valid.
                if not isinstance(fnames, str):
                    msg = "F and fnames incompatible: not a string"
                    raise_desc(ValueError, msg, F=F, fnames=fnames)
                self.F_single = True
                self.Fname = fnames

        # Resource-space names: mirror of the logic above for R.
        if isinstance(rnames, str):
            self.R_single = True
            self.Rname = rnames
        else:
            if isinstance(R, PosetProduct):
                if not isinstance(rnames, list) or not len(R) == len(rnames):
                    raise ValueError("R incompatible")
                self.R_single = False
                self.Rnames = rnames
            else:
                if not isinstance(rnames, str):
                    msg = "R and rnames incompatible: not a string"
                    raise_desc(ValueError, msg, R=R, rnames=rnames)
                self.R_single = True
                self.Rname = rnames

        self.icon = icon
    except Exception as e:
        # Wrap any validation failure with the full construction context.
        msg = 'Cannot wrap primitive DP.'
        raise_wrapped(ValueError, e, msg, dp=self.dp, F=F, R=R,
                      fnames=fnames, rnames=rnames, exc=sys.exc_info())
def poset_check_chain(poset, chain):
    """ Raises an exception if the chain is not a chain. """
    elements = list(chain)
    # Walk consecutive pairs; each element must be <= its successor.
    for idx, (below, above) in enumerate(zip(elements, elements[1:])):
        try:
            poset.check_leq(below, above)
        except NotLeq as e:
            msg = ('Fails for i = %s: %s ≰ %s' % (idx, below, above))
            raise_wrapped(ValueError, e, msg, compact=True,
                          chain=elements, poset=poset)
    return True
def from_dict(res):
    """Reconstruct a HostFailed instance from a result dict.

    Expects the keys 'abort', 'host', 'job_id', 'bt' and 'reason'; raises
    CompmakeBug (via raise_wrapped) if any are missing.
    """
    from compmake.jobs.result_dict import result_dict_check
    result_dict_check(res)
    try:
        res['abort']  # only a presence check; the value is unused
        # BUG FIX: the original stored the result in `e`, which was then
        # shadowed/clobbered by `except KeyError as e`, so on the error path
        # `return e` returned the KeyError (Py2) or raised NameError (Py3).
        failure = HostFailed(host=res['host'],
                             job_id=res['job_id'],
                             bt=res['bt'],
                             reason=res['reason'])
    except KeyError as e:
        raise_wrapped(CompmakeBug, e, 'Incomplete dict',
                      res=res, keys=list(res.keys()))
        raise  # defensive: never fall through if raise_wrapped returns
    return failure
def make_server_request(token, endpoint, data=None, method='GET', timeout=3):
    """ Raise RequestFailed or ConnectionError.
        Returns the result in 'result'.

        :param token: messaging token sent in the X-Messaging-Token header
        :param endpoint: path appended to the server URL
        :param data: optional payload, JSON-encoded before sending
        :param method: HTTP method to use
        :param timeout: socket timeout in seconds
    """
    server = get_duckietown_server_url()
    url = server + endpoint
    headers = {'X-Messaging-Token': token}
    if data is not None:
        data = json.dumps(data)
    req = urllib2.Request(url, headers=headers, data=data)
    # urllib2 chooses GET/POST from the presence of data; force the method.
    req.get_method = lambda: method
    try:
        res = urllib2.urlopen(req, timeout=timeout)
        data = res.read()
    except urllib2.URLError as e:
        msg = 'Cannot connect to server %s' % url
        raise_wrapped(ConnectionError, e, msg)
        raise  # defensive: never fall through if raise_wrapped returns
    try:
        result = json.loads(data)
    except ValueError as e:
        msg = 'Cannot read answer from server.'
        msg += '\n\n' + indent(data, ' > ')
        raise_wrapped(ConnectionError, e, msg)
        raise
    if not isinstance(result, dict) or 'ok' not in result:
        msg = 'Server provided invalid JSON response. Expected a dict with "ok" in it.'
        msg += '\n\n' + indent(data, ' > ')
        raise ConnectionError(msg)
    if result['ok']:
        if 'result' not in result:
            msg = 'Server provided invalid JSON response. Expected a field "result".'
            # BUG FIX: `result` is a dict here; indent() is given strings at
            # every other call site, so stringify it first.
            msg += '\n\n' + indent(str(result), ' > ')
            raise ConnectionError(msg)
        return result['result']
    else:
        msg = 'Failed request for %s:\n%s' % (url, result.get('error', result))
        raise RequestFailed(msg)
def substitute_dependencies(a, db):
    """Recursively replace compmake ``Promise`` objects inside ``a`` with the
    corresponding job user-objects loaded from ``db``.

    Tuples without any dependencies are returned as-is; named tuples are
    rejected (asserted) before reconstruction. Non-container values are
    returned unchanged.
    """
    from compmake import Promise

    # XXX: this is a workaround
    if leave_it_alone(a):
        return deepcopy(a)

    if isinstance(a, dict):
        ca = type(a)
        rest = [(k, substitute_dependencies(v, db=db)) for k, v in a.items()]
        try:
            res = ca(rest)
            # print('%s->%s' % (a, str(res)))
            return res
        except BaseException as e:
            # BUG FIX: the original line ended with a trailing comma, which
            # silently made `msg` a 1-tuple instead of a string. Also dropped
            # the redundant TypeError from the except clause (it is already a
            # subclass of BaseException).
            msg = 'Could not instance something looking like a dict.'
            raise_wrapped(Exception, e, msg, ca=ca)
    elif isinstance(a, list):
        # XXX: This fails for subclasses of list
        return type(a)([substitute_dependencies(x, db=db) for x in a])
    elif isinstance(a, tuple):
        # First, check that there are dependencies
        deps_in_tuple = collect_dependencies(a)
        if not deps_in_tuple:
            # if not, just return the tuple
            return a
        # XXX: This fails for subclasses of tuples
        assert not isnamedtupleinstance(a), a
        ta = type(a)
        contents = [substitute_dependencies(x, db=db) for x in a]
        try:
            return ta(contents)
        except TypeError as e:
            msg = 'Cannot reconstruct complex tuple.'
            raise_wrapped(ValueError, e, msg, ta=ta, contents=contents)
    elif isinstance(a, Promise):
        s = get_job_userobject(a.job_id, db=db)
        return substitute_dependencies(s, db=db)
    else:
        return a
def __call__(self, x):
    """Apply the map to ``x``, optionally validating that the input lies in
    the domain and the output lies in the codomain (when extra checks are
    enabled)."""
    def _must_belong(space, value, msg, **info):
        # Wrap a failed membership check with the map and point involved.
        try:
            space.belongs(value)
        except NotBelongs as e:
            raise_wrapped(NotBelongs, e, msg, map=self, **info)

    if do_extra_checks():
        domain = self.get_domain()
        _must_belong(domain, x, 'Point does not belong to domain.',
                     x=x, domain=domain)

    y = self._call(x)

    if do_extra_checks():
        codomain = self.get_codomain()
        _must_belong(codomain, y, 'Point does not belong to codomain.',
                     y=y, codomain=codomain)

    return y
def mplayer_identify(filename, intolerant=True): """ Returns a dictionary, with fields: width, height fps * length * extra_mencoder_info Mplayer might fail to identify FPS and length. If intolerant=True, an error is raised, otherwise they are set to None. """ # need at least 1 frame otherwise sometimes the video aspect is not known args = ('mplayer -identify -vo null -ao null -frames 1'.split() + [filename]) try: try: res = system_cmd_result('.', args, display_stdout=False, display_stderr=False, raise_on_error=True, capture_keyboard_interrupt=False) except CmdException: raise except Exception as e: raise_wrapped(Exception, e, "Could not identify movie", filename=filename) try: output = res.stdout return parse_mplayer_info_output(output,intolerant=intolerant) except Exception as e: raise_wrapped(Exception, e, "Could not identify movie", cmd=" ".join(args), filename=filename, output=output)
def interpret_commands_wrap(commands, context, cq):
    """ 
        Returns None or raises CommandFailed, ShellExitRequested,
        CompmakeBug, KeyboardInterrupt.

        Publishes 'command-line-*' events around the execution of the
        commands; maps each known failure mode to the appropriate
        exception for the caller. Handler order matters: CompmakeBug and
        ShellExitRequested pass through untouched.
    """
    assert context is not None
    publish(context, 'command-line-starting', command=commands)

    try:
        interpret_commands(commands, context=context, cq=cq)
        publish(context, 'command-line-succeeded', command=commands)
    except CompmakeBug:
        # internal bugs propagate unchanged
        raise
    except UserError as e:
        publish(context, 'command-line-failed', command=commands, reason=e)
        # user errors are reported to the caller as a command failure
        raise CommandFailed(str(e))
    except CommandFailed as e:
        publish(context, 'command-line-failed', command=commands, reason=e)
        raise
    except (KeyboardInterrupt, JobInterrupted) as e:
        publish(context, 'command-line-interrupted',
                command=commands, reason='KeyboardInterrupt')
        # If debugging
        # tb = traceback.format_exc()
        # print tb  # XXX
        raise CommandFailed(str(e))
        # raise CommandFailed('Execution of %r interrupted.' % commands)
    except ShellExitRequested:
        raise
    except Exception as e:
        # anything else reaching this point is a compmake bug: report it
        tb = traceback.format_exc()
        msg0 = ('Warning, I got this exception, while it should '
                'have been filtered out already. '
                'This is a compmake BUG that should be reported '
                'at http://github.com/AndreaCensi/compmake/issues')
        msg = msg0 + "\n" + indent(tb, 'bug| ')
        publish(context, 'compmake-bug', user_msg=msg, dev_msg="")  # XXX
        raise_wrapped(CompmakeBug, e, msg)
def interpret_commands_wrap(commands, context, cq):
    """ 
        Returns None or raises CommandFailed, ShellExitRequested,
        CompmakeBug, KeyboardInterrupt.

        Publishes 'command-line-*' events around the execution of the
        commands; maps each known failure mode to the appropriate
        exception for the caller. Handler order matters: CompmakeBug and
        ShellExitRequested pass through untouched.
    """
    assert context is not None
    publish(context, 'command-line-starting', command=commands)

    try:
        interpret_commands(commands, context=context, cq=cq)
        publish(context, 'command-line-succeeded', command=commands)
    except CompmakeBug:
        # internal bugs propagate unchanged
        raise
    except UserError as e:
        publish(context, 'command-line-failed', command=commands, reason=e)
        # user errors are reported to the caller as a command failure
        raise CommandFailed(str(e))
    except CommandFailed as e:
        publish(context, 'command-line-failed', command=commands, reason=e)
        raise
    except (KeyboardInterrupt, JobInterrupted) as e:
        publish(context, 'command-line-interrupted',
                command=commands, reason='KeyboardInterrupt')
        # If debugging
        # tb = traceback.format_exc()
        # print tb  # XXX
        raise CommandFailed(str(e))
        # raise CommandFailed('Execution of %r interrupted.' % commands)
    except ShellExitRequested:
        raise
    except Exception as e:
        # anything else reaching this point is a compmake bug: report it
        tb = traceback.format_exc()
        msg0 = ('Warning, I got this exception, while it should '
                'have been filtered out already. '
                'This is a compmake BUG that should be reported '
                'at http://github.com/AndreaCensi/compmake/issues')
        msg = msg0 + "\n" + indent(tb, 'bug| ')
        publish(context, 'compmake-bug', user_msg=msg, dev_msg="")  # XXX
        # note: this variant wraps with the short message (no traceback)
        raise_wrapped(CompmakeBug, e, msg0)
def comp_(context, command_, *args, **kwargs):
    """
        Main method to define a computation step.

        Extra arguments:

        :arg:job_id: sets the job id (respects job_prefix)

        :arg:extra_dep: extra dependencies (not passed as arguments)
        :arg:command_name: used to define job name if job_id not provided.
        If not given, command_.__name__ is used.

        :arg:needs_context: if this is a dynamic job

        Raises UserError if command is not pickable.
    """
    db = context.get_compmake_db()

    command = command_

    # Warn (once per function) about functions defined in __main__:
    # they cannot be pickled, so 'make new_process=1' would fail.
    if hasattr(command, '__module__') and command.__module__ == '__main__':
        if not command in WarningStorage.warned:
            if WarningStorage.warned:
                # already warned for another function
                msg = ('(Same warning for function %r.)' % command.__name__)
            else:
                msg = ("A warning about the function %r: " % command.__name__)
                msg += (
                    "This function is defined directly in the __main__ "
                    "module, "
                    "which means that it cannot be pickled correctly due to "
                    "a limitation of Python and 'make new_process=1' will "
                    "fail. "
                    "For best results, please define functions in external "
                    "modules. "
                    'For more info, read '
                    'http://stefaanlippens.net/pickleproblem '
                    'and the bug report http://bugs.python.org/issue5509.')
            warning(msg)
            WarningStorage.warned.add(command)

    # In slave mode, jobs are not (re)defined.
    if get_compmake_status() == CompmakeConstants.compmake_status_slave:
        return None

    # Check that this is a pickable function
    try:
        try_pickling(command)
    except Exception as e:
        msg = ('Cannot pickle function. Make sure it is not a lambda '
               'function or a nested function. (This is a limitation of '
               'Python)')
        raise_wrapped(UserError, e, msg, command=command)

    # Determine a human-readable description of the command.
    if CompmakeConstants.command_name_key in kwargs:
        command_desc = kwargs.pop(CompmakeConstants.command_name_key)
    elif hasattr(command, '__name__'):
        command_desc = command.__name__
    else:
        command_desc = type(command).__name__

    args = list(args)  # args is a non iterable tuple

    # Get job id from arguments
    if CompmakeConstants.job_id_key in kwargs:
        # make sure that command does not have itself a job_id key
        try:
            argspec = inspect.getargspec(command)
        except TypeError:
            # Assume Cython function
            # XXX: write test
            pass
        else:
            if CompmakeConstants.job_id_key in argspec.args:
                msg = ("You cannot define the job id in this way because %r "
                       "is already a parameter of this function." %
                       CompmakeConstants.job_id_key)
                raise UserError(msg)

        job_id = kwargs[CompmakeConstants.job_id_key]
        check_isinstance(job_id, six.string_types)
        if ' ' in job_id:
            msg = 'Invalid job id: %r' % job_id
            raise UserError(msg)

        # Respect the current prefix, if set.
        job_prefix = context.get_comp_prefix()
        if job_prefix:
            job_id = '%s-%s' % (job_prefix, job_id)

        del kwargs[CompmakeConstants.job_id_key]

        if context.was_job_defined_in_this_session(job_id):
            # unless it is dynamically generated
            if not job_exists(job_id, db=db):
                msg = 'The job %r was defined but not found in DB. I will let it slide.' % job_id
                print(msg)
            else:
                msg = 'The job %r was already defined in this session.' % job_id
                old_job = get_job(job_id, db=db)
                msg += '\n old_job.defined_by: %s ' % old_job.defined_by
                msg += '\n context.currently_executing: %s ' % context.currently_executing
                msg += ' others defined in session: %s' % context.get_jobs_defined_in_this_session()
                print(msg)
                # warnings.warn('I know something is more complicated here')
                # if old_job.defined_by is not None and
                # old_job.defined_by == context.currently_executing:
                #     # exception, it's ok
                #     pass
                # else:
                msg = 'Job %r already defined.' % job_id
                raise UserError(msg)
        else:
            if job_exists(job_id, db=db):
                # ok, you gave us a job_id, but we still need to check whether
                # it is the same job
                stack = context.currently_executing
                defined_by = get_job(job_id, db=db).defined_by
                if defined_by == stack:
                    # this is the same job-redefining
                    pass
                else:
                    # different defining stack: pick a fresh id by
                    # appending a counter
                    for i in range(1000):  # XXX
                        n = '%s-%d' % (job_id, i)
                        if not job_exists(n, db=db):
                            job_id = n
                            break
                    if False:
                        print('The job_id %r was given explicitly but already '
                              'defined.' % job_id)
                        print('current stack: %s' % stack)
                        print(' its stack: %s' % defined_by)
                        print('New job_id is %s' % job_id)
    else:
        job_id = generate_job_id(command_desc, context=context)

    context.add_job_defined_in_this_session(job_id)

    # could be done better
    if 'needs_context' in kwargs:
        needs_context = True
        del kwargs['needs_context']
    else:
        needs_context = False

    # Validate and normalize the extra_dep argument into a set of job ids.
    if CompmakeConstants.extra_dep_key in kwargs:
        extra_dep = kwargs[CompmakeConstants.extra_dep_key]
        del kwargs[CompmakeConstants.extra_dep_key]

        if not isinstance(extra_dep, (list, Promise)):
            msg = ('The "extra_dep" argument must be a list of promises; '
                   'got: %s' % describe_value(extra_dep))
            raise ValueError(msg)
        if isinstance(extra_dep, Promise):
            extra_dep = [extra_dep]
        assert isinstance(extra_dep, list)
        for ed in extra_dep:
            if not isinstance(ed, Promise):
                msg = ('The "extra_dep" argument must be a list of promises; '
                       'got: %s' % describe_value(extra_dep))
                raise ValueError(msg)
        extra_dep = collect_dependencies(extra_dep)
    else:
        extra_dep = set()

    # Children = all jobs this job depends on (from args plus extra_dep).
    children = collect_dependencies([args, kwargs])
    children.update(extra_dep)

    for c in children:
        if not job_exists(c, db):
            msg = "Job %r references a job %r that doesnt exist." % (job_id, c)
            raise ValueError(msg)

    all_args = (command, args, kwargs)

    assert len(context.currently_executing) >= 1
    assert context.currently_executing[0] == 'root'
    c = Job(job_id=job_id,
            children=children,
            command_desc=command_desc,
            needs_context=needs_context,
            defined_by=context.currently_executing)

    # Need to inherit the pickle
    if context.currently_executing[-1] != 'root':
        parent_job = get_job(context.currently_executing[-1], db)
        c.pickle_main_context = parent_job.pickle_main_context

    # If the job already exists, merge relationship info from the old
    # definition into the new Job object.
    if job_exists(job_id, db):
        old_job = get_job(job_id, db)

        if old_job.defined_by != c.defined_by:
            warning('Redefinition of %s: ' % job_id)
            warning(' cur defined_by: %s' % c.defined_by)
            warning(' old defined_by: %s' % old_job.defined_by)

        if old_job.children != c.children:
            # warning('Redefinition problem:')
            # warning(' old children: %s' % (old_job.children))
            # warning(' old dyn children: %s' % old_job.dynamic_children)
            # warning(' new children: %s' % (c.children))

            # fixing this
            for x, deps in old_job.dynamic_children.items():
                if not x in c.children:
                    # not a child any more
                    # FIXME: ok but note it might be a dependence of a child
                    # continue
                    pass
                c.dynamic_children[x] = deps
                for j in deps:
                    if not j in c.children:
                        c.children.add(j)

        if old_job.parents != c.parents:
            # warning('Redefinition of %s: ' % job_id)
            # warning(' cur parents: %s' % (c.parents))
            # warning(' old parents: %s' % old_job.parents)
            for p in old_job.parents:
                c.parents.add(p)

        # TODO: preserve defines
        # from compmake.ui.visualization import info
        # info('defining job %r with children %r' % (job_id, c.children))

    # if True or c.defined_by == ['root']:
    for child in children:
        db_job_add_parent_relation(child=child, parent=job_id, db=db)

    if get_compmake_config('check_params') and job_exists(job_id, db):
        # OK, this is going to be black magic.
        # We want to load the previous job definition,
        # however, by unpickling(), it will start
        # __import__()ing the modules, perhaps
        # even the one that is calling us.
        # What happens, then is that it will try to
        # add another time this computation recursively.
        # What we do, is that we temporarely switch to
        # slave mode, so that recursive calls to comp()
        # are disabled.
        # old_status = get_compmake_status()
        # set_compmake_status(
        # CompmakeConstants.compmake_status_slave)
        all_args_old = get_job_args(job_id, db=db)
        # set_compmake_status(old_status)
        same, reason = same_computation(all_args, all_args_old)

        if not same:
            # print('different job, cleaning cache:\n%s ' % reason)
            from compmake.jobs.actions import clean_targets
            clean_targets([job_id], db)
            # if job_cache_exists(job_id, db):
            #     delete_job_cache(job_id, db)
            publish(context, 'job-redefined', job_id=job_id, reason=reason)
        else:
            # print('ok, same job')
            pass
        # XXX TODO clean the cache
        # else:
        #     publish(context, 'job-already-defined', job_id=job_id)

    set_job_args(job_id, all_args, db=db)
    set_job(job_id, c, db=db)
    publish(context, 'job-defined', job_id=job_id)

    return Promise(job_id)
def comp_(context, command_, *args, **kwargs):
    """
        Main method to define a computation step.

        Extra arguments:

        :arg:job_id: sets the job id (respects job_prefix)

        :arg:extra_dep: extra dependencies (not passed as arguments)
        :arg:command_name: used to define job name if job_id not provided.
        If not given, command_.__name__ is used.

        :arg:needs_context: if this is a dynamic job

        Raises UserError if command is not pickable.
    """
    db = context.get_compmake_db()

    command = command_

    # Warn (once per function) about functions defined in __main__:
    # they cannot be pickled, so 'make new_process=1' would fail.
    if hasattr(command, '__module__') and command.__module__ == '__main__':
        if not command in WarningStorage.warned:
            if WarningStorage.warned:
                # already warned for another function
                msg = ('(Same warning for function %r.)' % command.__name__)
            else:
                msg = ("A warning about the function %r: " % command.__name__)
                msg += (
                    "This function is defined directly in the __main__ "
                    "module, "
                    "which means that it cannot be pickled correctly due to "
                    "a limitation of Python and 'make new_process=1' will "
                    "fail. "
                    "For best results, please define functions in external "
                    "modules. "
                    'For more info, read '
                    'http://stefaanlippens.net/pickleproblem '
                    'and the bug report http://bugs.python.org/issue5509.')
            warning(msg)
            WarningStorage.warned.add(command)

    # In slave mode, jobs are not (re)defined.
    if get_compmake_status() == CompmakeConstants.compmake_status_slave:
        return None

    # Check that this is a pickable function
    try:
        try_pickling(command)
    except Exception as e:
        msg = ('Cannot pickle function. Make sure it is not a lambda '
               'function or a nested function. (This is a limitation of '
               'Python)')
        raise_wrapped(UserError, e, msg, command=command)

    # Determine a human-readable description of the command.
    if CompmakeConstants.command_name_key in kwargs:
        command_desc = kwargs.pop(CompmakeConstants.command_name_key)
    elif hasattr(command, '__name__'):
        command_desc = command.__name__
    else:
        command_desc = type(command).__name__

    args = list(args)  # args is a non iterable tuple

    # Get job id from arguments
    if CompmakeConstants.job_id_key in kwargs:
        # make sure that command does not have itself a job_id key
        try:
            argspec = inspect.getargspec(command)
        except TypeError:
            # Assume Cython function
            # XXX: write test
            pass
        else:
            if CompmakeConstants.job_id_key in argspec.args:
                msg = ("You cannot define the job id in this way because %r "
                       "is already a parameter of this function." %
                       CompmakeConstants.job_id_key)
                raise UserError(msg)

        job_id = kwargs[CompmakeConstants.job_id_key]
        check_isinstance(job_id, six.string_types)
        if ' ' in job_id:
            msg = 'Invalid job id: %r' % job_id
            raise UserError(msg)

        # Respect the current prefix, if set.
        job_prefix = context.get_comp_prefix()
        if job_prefix:
            job_id = '%s-%s' % (job_prefix, job_id)

        del kwargs[CompmakeConstants.job_id_key]

        if context.was_job_defined_in_this_session(job_id):
            # unless it is dynamically generated
            if not job_exists(job_id, db=db):
                msg = 'The job %r was defined but not found in DB. I will let it slide.' % job_id
                print(msg)
            else:
                msg = 'The job %r was already defined in this session.' % job_id
                old_job = get_job(job_id, db=db)
                msg += '\n old_job.defined_by: %s ' % old_job.defined_by
                msg += '\n context.currently_executing: %s ' % context.currently_executing
                msg += ' others defined in session: %s' % context.get_jobs_defined_in_this_session()
                print(msg)
                # warnings.warn('I know something is more complicated here')
                # if old_job.defined_by is not None and
                # old_job.defined_by == context.currently_executing:
                #     # exception, it's ok
                #     pass
                # else:
                msg = 'Job %r already defined.' % job_id
                raise UserError(msg)
        else:
            if job_exists(job_id, db=db):
                # ok, you gave us a job_id, but we still need to check whether
                # it is the same job
                stack = context.currently_executing
                defined_by = get_job(job_id, db=db).defined_by
                if defined_by == stack:
                    # this is the same job-redefining
                    pass
                else:
                    # different defining stack: pick a fresh id by
                    # appending a counter
                    for i in range(1000):  # XXX
                        n = '%s-%d' % (job_id, i)
                        if not job_exists(n, db=db):
                            job_id = n
                            break
                    if False:
                        print('The job_id %r was given explicitly but already '
                              'defined.' % job_id)
                        print('current stack: %s' % stack)
                        print(' its stack: %s' % defined_by)
                        print('New job_id is %s' % job_id)
    else:
        job_id = generate_job_id(command_desc, context=context)

    context.add_job_defined_in_this_session(job_id)

    # could be done better
    if 'needs_context' in kwargs:
        needs_context = True
        del kwargs['needs_context']
    else:
        needs_context = False

    # Validate and normalize the extra_dep argument into a set of job ids.
    if CompmakeConstants.extra_dep_key in kwargs:
        extra_dep = kwargs[CompmakeConstants.extra_dep_key]
        del kwargs[CompmakeConstants.extra_dep_key]

        if not isinstance(extra_dep, (list, Promise)):
            msg = ('The "extra_dep" argument must be a list of promises; '
                   'got: %s' % describe_value(extra_dep))
            raise ValueError(msg)
        if isinstance(extra_dep, Promise):
            extra_dep = [extra_dep]
        assert isinstance(extra_dep, list)
        for ed in extra_dep:
            if not isinstance(ed, Promise):
                msg = ('The "extra_dep" argument must be a list of promises; '
                       'got: %s' % describe_value(extra_dep))
                raise ValueError(msg)
        extra_dep = collect_dependencies(extra_dep)
    else:
        extra_dep = set()

    # Children = all jobs this job depends on (from args plus extra_dep).
    children = collect_dependencies([args, kwargs])
    children.update(extra_dep)

    for c in children:
        if not job_exists(c, db):
            msg = "Job %r references a job %r that doesnt exist." % (job_id, c)
            raise ValueError(msg)

    all_args = (command, args, kwargs)

    assert len(context.currently_executing) >= 1
    assert context.currently_executing[0] == 'root'
    c = Job(job_id=job_id,
            children=children,
            command_desc=command_desc,
            needs_context=needs_context,
            defined_by=context.currently_executing)

    # Need to inherit the pickle
    if context.currently_executing[-1] != 'root':
        parent_job = get_job(context.currently_executing[-1], db)
        c.pickle_main_context = parent_job.pickle_main_context

    # If the job already exists, merge relationship info from the old
    # definition into the new Job object.
    if job_exists(job_id, db):
        old_job = get_job(job_id, db)

        if old_job.defined_by != c.defined_by:
            warning('Redefinition of %s: ' % job_id)
            warning(' cur defined_by: %s' % c.defined_by)
            warning(' old defined_by: %s' % old_job.defined_by)

        if old_job.children != c.children:
            # warning('Redefinition problem:')
            # warning(' old children: %s' % (old_job.children))
            # warning(' old dyn children: %s' % old_job.dynamic_children)
            # warning(' new children: %s' % (c.children))

            # fixing this
            for x, deps in old_job.dynamic_children.items():
                if not x in c.children:
                    # not a child any more
                    # FIXME: ok but note it might be a dependence of a child
                    # continue
                    pass
                c.dynamic_children[x] = deps
                for j in deps:
                    if not j in c.children:
                        c.children.add(j)

        if old_job.parents != c.parents:
            # warning('Redefinition of %s: ' % job_id)
            # warning(' cur parents: %s' % (c.parents))
            # warning(' old parents: %s' % old_job.parents)
            for p in old_job.parents:
                c.parents.add(p)

        # TODO: preserve defines
        # from compmake.ui.visualization import info
        # info('defining job %r with children %r' % (job_id, c.children))

    # if True or c.defined_by == ['root']:
    for child in children:
        db_job_add_parent_relation(child=child, parent=job_id, db=db)

    if get_compmake_config('check_params') and job_exists(job_id, db):
        # OK, this is going to be black magic.
        # We want to load the previous job definition,
        # however, by unpickling(), it will start
        # __import__()ing the modules, perhaps
        # even the one that is calling us.
        # What happens, then is that it will try to
        # add another time this computation recursively.
        # What we do, is that we temporarely switch to
        # slave mode, so that recursive calls to comp()
        # are disabled.
        # old_status = get_compmake_status()
        # set_compmake_status(
        # CompmakeConstants.compmake_status_slave)
        all_args_old = get_job_args(job_id, db=db)
        # set_compmake_status(old_status)
        same, reason = same_computation(all_args, all_args_old)

        if not same:
            # print('different job, cleaning cache:\n%s ' % reason)
            from compmake.jobs.actions import clean_targets
            clean_targets([job_id], db)
            # if job_cache_exists(job_id, db):
            #     delete_job_cache(job_id, db)
            publish(context, 'job-redefined', job_id=job_id, reason=reason)
        else:
            # print('ok, same job')
            pass
        # XXX TODO clean the cache
        # else:
        #     publish(context, 'job-already-defined', job_id=job_id)

    set_job_args(job_id, all_args, db=db)
    set_job(job_id, c, db=db)
    publish(context, 'job-defined', job_id=job_id)

    return Promise(job_id)