def __init__(self, name, title, description, protocol, date_open, date_close, steps, roles, transitions):
    self.name = name
    self.title = title
    self.description = description
    self.protocol = protocol
    self.date_open = date_open
    check_isinstance(date_open, datetime)
    check_isinstance(date_close, datetime)
    self.date_close = date_close
    self.steps = steps
    self.roles = roles
    for k, permissions in self.roles.items():
        if not k.startswith('user:'):
            msg = 'Permissions should start with "user:", %s' % k
            raise InvalidChallengeDescription(msg)
        p2 = dict(**permissions)
        for perm in allowed_permissions:
            p2.pop(perm, None)
        if p2:
            msg = 'Unknown permissions: %s' % p2
            raise InvalidChallengeDescription(msg)
    self.first_step = None
    self.ct = ChallengeTransitions(transitions, list(self.steps))
def integrate(self, dt, commands):
    """
    :param dt:
    :param commands: an instance of CarCommands
    :return:
    """
    check_isinstance(commands, CarCommands)

    # compute the linear, angular velocities for the platform
    # using the simple car equations
    longitudinal = commands.linear_velocity
    angular = commands.linear_velocity / self.parameters.wheel_distance * math.tan(commands.steering_angle)
    lateral = 0
    linear = [longitudinal, lateral]

    # represent this as se(2)
    commands_se2 = geo.se2_from_linear_angular(linear, angular)

    # Call the "integrate" function of GenericKinematicsSE2
    s1 = GenericKinematicsSE2.integrate(self, dt, commands_se2)

    # new state
    c1 = s1.q0, s1.v0
    t1 = s1.t0
    return CarDynamics(self.parameters, c1, t1)
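# A minimal standalone sketch of the bicycle-model twist computed above, in
# plain Python (the function name and example numbers are illustrative
# assumptions, not part of this codebase):
import math

def bicycle_twist(linear_velocity, steering_angle, wheel_distance):
    """Returns (vx, vy, omega) for the simple-car (bicycle) model."""
    vx = linear_velocity
    vy = 0.0  # this model has no lateral slip
    omega = linear_velocity / wheel_distance * math.tan(steering_angle)
    return vx, vy, omega

# e.g. 0.5 m/s forward, 0.3 rad steering, 0.1 m wheelbase:
# bicycle_twist(0.5, 0.3, 0.1) -> (0.5, 0.0, ~1.547)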
def integrate(self, dt, commands):
    """
    :param dt:
    :param commands: an instance of CarCommands
    :return:
    """
    check_isinstance(commands, CarCommands)

    # Your code comes here!
    q, _ = self.TSE2_from_state()
    _, direction = geo.translation_angle_from_SE2(q)
    linear = [commands.linear_velocity, 0]
    angular = (commands.linear_velocity / self.parameters.wheel_distance) * math.tan(commands.steering_angle)

    # represent this as se(2)
    commands_se2 = geo.se2_from_linear_angular(linear, angular)

    # Call the "integrate" function of GenericKinematicsSE2
    s1 = GenericKinematicsSE2.integrate(self, dt, commands_se2)

    # new state
    c1 = s1.q0, s1.v0
    t1 = s1.t0
    return CarDynamics(self.parameters, c1, t1)
def integrate(self, dt, commands):
    """
    :param dt:
    :param commands: an instance of WheelVelocityCommands
    :return:
    """
    check_isinstance(commands, WheelVelocityCommands)

    # Compute the linear velocity for the wheels
    # by multiplying radius times angular velocity
    v_r = self.parameters.radius_right * commands.right_wheel_angular_velocity
    v_l = self.parameters.radius_left * commands.left_wheel_angular_velocity

    # compute the linear, angular velocities for the platform
    # using the differential drive equations
    longitudinal = (v_r + v_l) * 0.5
    angular = (v_r - v_l) / self.parameters.wheel_distance
    lateral = 0.0
    linear = [longitudinal, lateral]

    # represent this as se(2)
    commands_se2 = geo.se2_from_linear_angular(linear, angular)

    # Call the "integrate" function of GenericKinematicsSE2
    s1 = GenericKinematicsSE2.integrate(self, dt, commands_se2)

    # new state
    c1 = s1.q0, s1.v0
    t1 = s1.t0
    return DifferentialDriveDynamics(self.parameters, c1, t1)
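# A standalone sketch of the differential-drive equations used above
# (the function name and the numbers are illustrative assumptions):
def differential_drive_twist(omega_l, omega_r, radius_left, radius_right, wheel_distance):
    """Returns (vx, omega) for a differential-drive platform."""
    v_l = radius_left * omega_l    # linear speed of the left wheel
    v_r = radius_right * omega_r   # linear speed of the right wheel
    vx = (v_r + v_l) * 0.5                # longitudinal speed of the platform
    omega = (v_r - v_l) / wheel_distance  # angular speed of the platform
    return vx, omega

# e.g. equal radii 0.03 m, wheel distance 0.1 m, wheels at 10 and 12 rad/s:
# differential_drive_twist(10, 12, 0.03, 0.03, 0.1) -> (0.33, 0.6)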
def integrate(self, dt, commands):
    """
    :param dt:
    :param commands: an instance of CarCommands
    :return:
    """
    check_isinstance(commands, CarCommands)

    # calculate linear velocities
    x_vel = commands.linear_velocity
    y_vel = 0
    linear = [x_vel, y_vel]

    # calculate angular velocities
    angular = commands.linear_velocity * math.tan(commands.steering_angle) / self.parameters.wheel_distance

    # represent this as se(2)
    commands_se2 = geo.se2_from_linear_angular(linear, angular)

    # Call the "integrate" function of GenericKinematicsSE2
    s1 = GenericKinematicsSE2.integrate(self, dt, commands_se2)

    # new state
    c1 = s1.q0, s1.v0
    t1 = s1.t0
    return CarDynamics(self.parameters, c1, t1)
def integrate(self, dt, commands):
    """
    :param dt:
    :param commands: an instance of CarCommands
    :return:
    """
    check_isinstance(commands, CarCommands)

    # Your code comes here!
    # linear_velocity = [x_dot, y_dot]
    linear = [commands.linear_velocity, 0]
    # angular = [theta_dot]
    angular = commands.linear_velocity / self.parameters.wheel_distance * np.tan(commands.steering_angle)

    # represent this as se(2)
    commands_se2 = geo.se2_from_linear_angular(linear, angular)

    # Call the "integrate" function of GenericKinematicsSE2
    s1 = GenericKinematicsSE2.integrate(self, dt, commands_se2)

    # new state
    c1 = s1.q0, s1.v0
    t1 = s1.t0
    return CarDynamics(self.parameters, c1, t1)
def __init__(self, nid=None, children=None, caption=None):
    check_isinstance(nid, (type(None),) + six.string_types)
    check_isinstance(caption, (type(None), six.text_type))
    if children is not None and not isinstance(children, list):
        raise ValueError(
            "Received a %s object as children list, should"
            " be None or list." % describe_type(children)
        )
    self.nid = nid
    if children is None:
        children = []
    self.childid2node = {}
    self.children = []
    for c in children:
        if not isinstance(c, Node):
            msg = "Expected Node, got %s." % describe_type(c)
            raise ValueError(msg)
        self.add_child(c)
    self.parent = None
    self.caption = caption
    # used by subsection(), set_subsections_needed()
    self._subsections_needed = None
    # Reference to a figure that was created automatically
    # (see code in add_to_autofigure)
    self._auto_figure = None
def mvac_job_rdb(args):
    import multyvac

    job_id, context, event_queue_name, show_output, volumes, \
        rdb_vol_name, rdb_db, cwd = args
    check_isinstance(job_id, six.string_types)

    # Disable multyvac logging
    disable_logging_if_config(context)

    multyvac_job = mvac_job_rdb_instance(context, job_id, volumes,
                                         rdb_vol_name, rdb_db, cwd)
    multyvac_job.wait()

    db = context.get_compmake_db()
    vol = multyvac.volume.get(rdb_vol_name)  # @UndefinedVariable
    res = multyvac_job.get_result()
    result_dict_check(res)

    # is there something to download?
    stuff = ('fail' in res) or ('new_jobs' in res)
    # alternatives = bug, abort
    if stuff:
        new_jobs = res.get('new_jobs', [])
        transfer_down(db, vol, rdb_db, job_id, new_jobs)
    return res
def __init__(self, width, control_points, *args, **kwargs):
    PlacedObject.__init__(self, *args, **kwargs)
    self.width = width
    self.control_points = control_points
    for p in control_points:
        check_isinstance(p, SE2Transform)
def descendants(a_job_id):
    deps = collect_dependencies(get_job_userobject(a_job_id, self.db))
    children = self.direct_children(a_job_id)
    check_isinstance(children, set)
    r = children | deps
    check_isinstance(r, set)
    return r
def __init__(self, resource, image, web_image, caption):
    if six.PY2 and isinstance(caption, bytes):
        caption = caption.decode("utf-8")
    check_isinstance(caption, (type(None), six.text_type))
    self.resource = resource
    self.image = image
    self.web_image = web_image
    self.caption = caption
def __init__(self, nid=None, caption=None, cols=None):
    Node.__init__(self, nid=nid)
    check_isinstance(caption, (type(None), six.text_type))
    self.caption = caption
    self.cols = cols
    self.subfigures = []
    self.automatically_added = []
def __init__(self, name, title, description, evaluation_parameters, features_required):
    self.name = name
    self.title = title
    self.description = description
    check_isinstance(evaluation_parameters, dict)
    self.evaluation_parameters = evaluation_parameters
    check_isinstance(features_required, dict)
    self.features_required = features_required
def __init__(self, parameters, c0, t0):
    """
    :param parameters: instance of DifferentialDriveDynamicsParameters
    :param c0: initial configuration
    :param t0: initial time
    """
    check_isinstance(parameters, DifferentialDriveDynamicsParameters)
    self.parameters = parameters
    GenericKinematicsSE2.__init__(self, c0, t0)
def __init__(self, parameters, c0, t0):
    """
    :param parameters: instance of CarParameters
    :param c0: initial configuration
    :param t0: initial time
    """
    check_isinstance(parameters, CarParameters)
    self.parameters = parameters
    GenericKinematicsSE2.__init__(self, c0, t0)
def add_prefix(s, prefix):
    from contracts import check_isinstance
    check_isinstance(s, six.string_types)
    check_isinstance(prefix, six.string_types)
    result = ""
    for l in s.split('\n'):
        result += prefix + l + '\n'
    # chop last newline
    result = result[:-1]
    return result
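# Usage sketch for add_prefix, with the expected value worked out by hand
# (each line gains the prefix; the trailing newline is chopped):
# assert add_prefix('a\nb', '> ') == '> a\n> b'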
def get(self, timeout=0):  # @UnusedVariable
    if self.result is None:
        try:
            self.result = self.result_queue.get(block=True, timeout=timeout)
        except Empty as e:
            raise TimeoutError(e)
    check_isinstance(self.result, dict)
    result_dict_raise_if_error(self.result)
    return self.result
def set_object(self, name, ob, **transforms):
    check_isinstance(name, six.string_types)
    assert self is not ob
    self.children[name] = ob
    type2klass = {'ground_truth': GroundTruth}
    for k, v in transforms.items():
        klass = type2klass[k]
        st = klass(a=(), b=(name,), transform=v)
        i = len(self.spatial_relations)
        self.spatial_relations[i] = st
def expect_parse(expr, s, expected):
    check_isinstance(s, str)
    check_isinstance(expected, (type(None), Language))
    res = expr.parseString(s, parseAll=True)
    res = res[0]
    print(f'Obtained: {res}')
    print(f'Expected: {expected}')
    if expected:
        assert_equal(res, expected)
def set_metric(self, name, total, title=None, incremental=None, description=None, cumulative=None):
    check_isinstance(name, tuple)
    self.metrics[name] = EvaluatedMetric(total=total, incremental=incremental,
                                         title=title, description=description,
                                         cumulative=cumulative)
def __getitem__(self, attrs):
    if not isinstance(attrs, frozendict2):
        check_isinstance(attrs, dict)
        attrs = frozendict2(**attrs)
    try:
        return dict.__getitem__(self, attrs)
    except KeyError as e:
        msg = str(e)
        keys = self.keys()
        if keys:
            k = most_similar(self.keys(), attrs)
            msg += '\n The most similar key is: %s' % str(k)
        raise KeyError(msg)
def bs(fragment: str):
    """
    Returns the contents wrapped in an element called "fragment".
    Expects fragment as a str in utf-8.
    """
    check_isinstance(fragment, six.string_types)
    s = u'<fragment>%s</fragment>' % fragment
    wire = s.encode('utf-8')
    parsed = BeautifulSoup(wire, 'lxml')
    res = parsed.html.body.fragment
    assert res.name == 'fragment'
    return res
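# Usage sketch for bs(), assuming bs4 and lxml are installed; wrapping in a
# <fragment> element lets callers parse a fragment without a full document:
# tag = bs('<b>hi</b>')
# assert tag.name == 'fragment'
# assert tag.b.string == 'hi'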
def set_metric(self, name: Tuple[str, ...], total: Union[float, int],
               title: Optional[str] = None,
               description: Optional[str] = None,
               incremental: Optional[SampledSequence] = None,
               cumulative: Optional[SampledSequence] = None):
    check_isinstance(name, tuple)
    self.metrics[name] = EvaluatedMetric(total=total, incremental=incremental,
                                         title=title, description=description,
                                         cumulative=cumulative)
def __getitem__(self, attrs):
    if not isinstance(attrs, frozendict2):
        check_isinstance(attrs, dict)
        attrs = frozendict2(**attrs)
    try:
        return dict.__getitem__(self, attrs)
    except KeyError as e:
        msg = str(e)
        keys = self.keys()
        if keys:
            k = most_similar(self.keys(), attrs)
            msg += "\n The most similar key is: %s" % str(k)
        raise KeyError(msg)
def stag(name, text, _class=None, _id=None, href=None):
    """ If text is None, it is replaced by the empty string. """
    check_isinstance(name, (str, unicode))
    # allow None here, as promised by the docstring
    check_isinstance(text, (type(None), str, unicode))
    if text is None:
        text = ''
    t = Tag(name=name)
    t.append(NavigableString(text))
    if _class is not None:
        t['class'] = _class
    if _id is not None:
        t['id'] = _id
    if href is not None:
        t['href'] = href
    return t
def line_and_col(loc, strg):
    """Returns (line, col), both 0 based."""
    from .utils import check_isinstance
    check_isinstance(loc, int)
    check_isinstance(strg, six.string_types)
    # first find the line
    lines = strg.split('\n')

    if loc == len(strg):
        # Special case: we mark the end of the string
        last_line = len(lines) - 1
        last_char = len(lines[-1])
        return last_line, last_char

    if loc > len(strg):
        msg = ('Invalid loc = %d for s of len %d (%r)' % (loc, len(strg), strg))
        raise ValueError(msg)

    res_line = 0
    l = loc
    while True:
        if not lines:
            assert loc == 0, (loc, strg.__repr__())
            break
        first = lines[0]
        if l >= len(first) + len('\n'):
            lines = lines[1:]
            l -= (len(first) + len('\n'))
            res_line += 1
        else:
            break
    res_col = l
    inverse = location(res_line, res_col, strg)
    if inverse != loc:
        msg = 'Could not find line and col'
        from .utils import raise_desc
        raise_desc(AssertionError, msg, s=strg, loc=loc,
                   res_line=res_line, res_col=res_col, loc_recon=inverse)
    return (res_line, res_col)
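# Worked examples for line_and_col, checked by hand against the code above
# (s = 'ab\ncd' has length 5; the newline belongs to line 0):
# line_and_col(0, 'ab\ncd') -> (0, 0)
# line_and_col(3, 'ab\ncd') -> (1, 0)
# line_and_col(5, 'ab\ncd') -> (1, 2)   # end-of-string special case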
def job_compute(job, context):
    """ Returns a dictionary with fields "user_object" and "new_jobs" """
    check_isinstance(job, Job)
    job_id = job.job_id
    db = context.get_compmake_db()

    int_load_results = IntervalTimer()
    command, args, kwargs = get_cmd_args_kwargs(job_id, db=db)
    int_load_results.stop()

    JobCompute.current_job_id = job_id
    if job.needs_context:
        args = tuple(list([context]) + list(args))

        int_compute = IntervalTimer()
        res = execute_with_context(db=db, context=context, job_id=job_id,
                                   command=command, args=args, kwargs=kwargs)
        int_compute.stop()

        assert isinstance(res, dict)
        assert len(res) == 2, list(res.keys())
        assert 'user_object' in res
        assert 'new_jobs' in res

        res['int_load_results'] = int_load_results
        res['int_compute'] = int_compute
        res['int_gc'] = IntervalTimer()
        return res
    else:
        int_compute = IntervalTimer()
        user_object = command(*args, **kwargs)
        int_compute.stop()

        res = dict(user_object=user_object, new_jobs=[])
        res['int_load_results'] = int_load_results
        res['int_compute'] = int_compute
        res['int_gc'] = IntervalTimer()
        return res
def __init__(
    self,
    cc,
    qapp,
    parent,
    job_prefix,
    output_dir,
    extra_dep=None,
    resource_manager=None,
    extra_report_keys=None,
    report_manager=None,
):
    check_isinstance(cc, Context)
    check_isinstance(parent, (CompmakeContext, NoneType))
    self.cc = cc
    # can be removed once subtask() is removed
    self._qapp = qapp  # only used for count invocation
    self._parent = parent
    self._job_prefix = job_prefix

    if resource_manager is None:
        resource_manager = ResourceManager(self)

    if report_manager is None:
        # only create index if this is true
        self.private_report_manager = True
        reports = os.path.join(output_dir, "report")
        reports_index = os.path.join(output_dir, "report.html")
        report_manager = ReportManager(self, reports, reports_index)
    else:
        self.private_report_manager = False

    self._report_manager = report_manager
    self._resource_manager = resource_manager
    self._output_dir = output_dir
    self.n_comp_invocations = 0
    # avoid the mutable-default-argument pitfall of extra_dep=[]
    self._extra_dep = extra_dep if extra_dep is not None else []
    self._jobs = {}
    if extra_report_keys is None:
        extra_report_keys = {}
    self.extra_report_keys = extra_report_keys
    self._promise = None
    self.branched_contexts = []
    self.branched_children = []
def __init__(self, width, control_points, *args, **kwargs):
    PlacedObject.__init__(self, *args, **kwargs)
    self.width = float(width)
    self.control_points = control_points

    for p in control_points:
        check_isinstance(p, SE2Transform)

    for i in range(len(control_points) - 1):
        a = control_points[i]
        b = control_points[i + 1]
        ta, _ = geo.translation_angle_from_SE2(a.as_SE2())
        tb, _ = geo.translation_angle_from_SE2(b.as_SE2())
        d = np.linalg.norm(ta - tb)
        if d < 0.001:
            msg = 'Two points are too close: \n%s\n%s' % (a, b)
            raise ValueError(msg)
def count_resources(context, the_job):
    db = context.get_compmake_db()
    cache = get_job_cache(the_job, db=db)
    if cache.state != Cache.DONE:
        msg = 'The job %s was supposed to be finished: %s' % (the_job, cache)
        raise Exception(msg)

    cq = CacheQueryDB(db)
    children = cq.tree_children_and_uodeps(the_job)
    check_isinstance(children, set)
    children.add(the_job)

    res = {}
    for j in children:
        res[j] = context.comp_dynamic(my_get_job_cache, j,
                                      extra_dep=[Promise(j)],
                                      job_id='count-%s-%s' % (the_job, j))
    return context.comp(finalize_result, res)
def result_dict_check(res):
    check_isinstance(res, dict)
    # define msg up front so every branch below can use it
    msg = 'Invalid result dict: %r' % res
    if 'new_jobs' in res:
        assert 'deleted_jobs' in res, msg
        assert 'user_object_deps' in res, msg
    elif 'fail' in res:
        assert 'deleted_jobs' in res, msg
    elif 'bug' in res:
        pass
    elif 'abort' in res:
        pass
    elif 'interrupted' in res:
        assert 'deleted_jobs' in res, msg
    else:
        msg = 'Malformed result dict: %s' % res
        raise ValueError(msg)
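# A sketch of a result dict that passes the check above (field values are
# illustrative; real results carry the actual user object and job lists):
# result_dict_check(dict(user_object=None, user_object_deps=set(),
#                        new_jobs=[], deleted_jobs=[]))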
def __getitem__(self, item: Union[str, FQN]) -> 'PlacedObject':
    """
    Looks up a child either url-like ('child1/sub')
    or tuple-like (('child1', 'sub')).

    :param item:
    :return:
    """
    if isinstance(item, str):
        item = fqn_from_url(item)
    check_isinstance(item, tuple)
    return self.get_object_from_fqn(item)
def junit_xml(compmake_db):
    from junit_xml import TestSuite

    jobs = list(all_jobs(compmake_db))
    logger.info('Loaded %d jobs' % len(jobs))

    N = 10
    if len(jobs) < N:
        logger.error('too few jobs (I expect at least %s)' % N)
        sys.exit(128)

    test_cases = []
    for job_id in jobs:
        tc = junit_test_case_from_compmake(compmake_db, job_id)
        test_cases.append(tc)

    ts = TestSuite("comptests_test_suite", test_cases)

    res = TestSuite.to_xml_string([ts])
    check_isinstance(res, six.text_type)
    return res
def reset_config():
    # Reset all the config
    setattr(GlobalConfig, '_dirs', [])
    for _, m in GlobalConfig._masters.items():
        from conf_tools.master import ConfigMaster
        check_isinstance(m, ConfigMaster)
        for spec in m.specs.values():
            spec.clear()
            # List of files already read
            spec.files_read = set()
            spec.dirs_read = []
            spec.dirs_to_read = []
            # ID -> file where it was found
            spec.entry2file = {}
            spec.templates = {}
        # all the dirs that were passed to load(), in case we miss any
        setattr(m, '_dirs', [])
def job_compute(job, context):
    """ Returns a dictionary with fields "user_object" and "new_jobs" """
    check_isinstance(job, Job)
    job_id = job.job_id
    db = context.get_compmake_db()

    command, args, kwargs = get_cmd_args_kwargs(job_id, db=db)

    if job.needs_context:
        args = tuple(list([context]) + list(args))
        res = execute_with_context(db=db, context=context, job_id=job_id,
                                   command=command, args=args, kwargs=kwargs)
        assert isinstance(res, dict)
        assert len(res) == 2, list(res.keys())
        assert 'user_object' in res
        assert 'new_jobs' in res
        return res
    else:
        res = command(*args, **kwargs)
        return dict(user_object=res, new_jobs=[])
def compmake_execution_stats(context, promise, use_job_id=None):
    """
    Returns a promise for the execution stats of a job
    and its dependencies.
    """
    check_isinstance(promise, (Promise, str))

    if isinstance(promise, Promise):
        job_id = promise.job_id
    elif isinstance(promise, str):
        job_id = promise
        promise = Promise(job_id)
    else:
        assert False

    p2 = context.comp(dummy, promise)

    if use_job_id is not None:
        context.comp_prefix(None)
        return context.comp_dynamic(count_resources, the_job=job_id,
                                    job_id=use_job_id, extra_dep=p2)
    else:
        return context.comp_dynamic(count_resources, the_job=job_id,
                                    extra_dep=p2)
def comptest_to_junit_main():
    args = sys.argv[1:]
    if not args:
        msg = 'Require the path to a Compmake DB.'
        raise UserError(msg)

    dirname = args[0]
    # try compressed
    try:
        db = StorageFilesystem(dirname, compress=True)
    except Exception:
        db = StorageFilesystem(dirname, compress=False)

    jobs = list(all_jobs(db))
    if not jobs:
        msg = 'Could not find any job, compressed or not.'
        logger.error(msg)
        sys.exit(1)

    s = junit_xml(db)
    check_isinstance(s, six.text_type)
    s = s.encode('utf8')
    sys.stdout.buffer.write(s)
def location(line, col, s):
    from .utils import check_isinstance
    check_isinstance(line, int)
    check_isinstance(col, int)
    check_isinstance(s, six.string_types)
    lines = s.split('\n')
    previous_lines = sum(len(l) + len('\n') for l in lines[:line])
    offset = col
    return previous_lines + offset
def junit_test_case_from_compmake(db, job_id):
    from junit_xml import TestCase
    cache = get_job_cache(job_id, db=db)
    if cache.state == Cache.DONE:  # and cache.done_iterations > 1:
        # elapsed_sec = cache.walltime_used
        elapsed_sec = cache.cputime_used
    else:
        elapsed_sec = None

    check_isinstance(cache.captured_stderr, (type(None), six.text_type))
    check_isinstance(cache.captured_stdout, (type(None), six.text_type))
    check_isinstance(cache.exception, (type(None), six.text_type))
    stderr = remove_escapes(cache.captured_stderr)
    stdout = remove_escapes(cache.captured_stdout)

    tc = TestCase(name=job_id, classname=None, elapsed_sec=elapsed_sec,
                  stdout=stdout, stderr=stderr)

    if cache.state == Cache.FAILED:
        message = cache.exception
        output = cache.exception + "\n" + cache.backtrace
        tc.add_failure_info(message, output)

    return tc
def stats_from_cache(cache):
    check_isinstance(cache, Cache)
    return dict(walltime=cache.walltime_used, cputime=cache.cputime_used)
def write_screen_line(s):
    """ Writes and pads """
    # TODO: check that it is not too long
    check_isinstance(s, six.text_type)
    s = pad_to_screen(s)
    write_line_endl(s)
def parmake_job2(args):
    """
    args = tuple job_id, context, queue_name, show_events

    Returns a dictionary with fields "user_object", "new_jobs",
    "delete_jobs". "user_object" is set to None because we do not want
    to load it in our thread if not necessary. Sometimes it is necessary
    because it might contain a Promise.
    """
    job_id, context, event_queue_name, show_output = args  # @UnusedVariable

    check_isinstance(job_id, str)
    check_isinstance(event_queue_name, str)
    from .pmake_manager import PmakeManager
    event_queue = PmakeManager.queues[event_queue_name]

    db = context.get_compmake_db()

    setproctitle('compmake:%s' % job_id)

    class G():
        nlostmessages = 0

    try:
        # We register a handler for the events to be passed back
        # to the main process
        def handler(event):
            try:
                if not CompmakeConstants.disable_interproc_queue:
                    event_queue.put(event, block=False)
            except Full:
                G.nlostmessages += 1
                # Do not write messages here, it might create a recursive
                # problem.
                # sys.stderr.write('job %s: Queue is full, message is lost.\n'
                # % job_id)

        remove_all_handlers()

        if show_output:
            register_handler("*", handler)

        def proctitle(event):
            stat = '[%s/%s %s] (compmake)' % (event.progress,
                                              event.goal, event.job_id)
            setproctitle(stat)

        register_handler("job-progress", proctitle)

        publish(context, 'worker-status', job_id=job_id, status='started')

        # Note that this function is called after the fork.
        # All data is conserved, but resources need to be reopened
        try:
            db.reopen_after_fork()  # @UndefinedVariable
        except:
            pass

        publish(context, 'worker-status', job_id=job_id, status='connected')

        res = make(job_id, context=context)

        publish(context, 'worker-status', job_id=job_id, status='ended')

        res['user_object'] = None
        result_dict_check(res)
        return res

    except KeyboardInterrupt:
        assert False, ('KeyboardInterrupt should be captured by make() '
                       '(inside Job.compute())')
    except JobInterrupted:
        publish(context, 'worker-status', job_id=job_id, status='interrupted')
        raise
    except JobFailed:
        raise
    except BaseException:
        # XXX
        raise
    except:
        raise
    finally:
        publish(context, 'worker-status', job_id=job_id, status='cleanup')
        setproctitle('compmake-worker-finished')
def eval_ops(ops, context, cq):
    """ Evaluates an expression.
        ops: list of strings and int representing operators """
    check_isinstance(ops, list)

    def list_split(l, index):
        """ Splits a list in two """
        return l[0:index], l[index + 1:]

    # The sequence of the following operations
    # defines the associativity rules
    # in > except > not
    if Operators.INTERSECTION in ops:
        left, right = list_split(ops, ops.index(Operators.INTERSECTION))
        if not left or not right:
            msg = '''
INTERSECTION requires a left and right argument.
Interpreting "%s" INTERSECTION "%s".
''' % (' '.join(left), ' '.join(right))
            raise CompmakeSyntaxError(msg)

        left = eval_ops(ops=left, context=context, cq=cq)
        right = set(eval_ops(ops=right, context=context, cq=cq))
        for x in left:
            if x in right:
                yield x

    elif Operators.DIFFERENCE in ops:
        left, right = list_split(ops, ops.index(Operators.DIFFERENCE))
        if not left or not right:
            msg = '''
EXCEPT requires a left and right argument.
Interpreting "%s" EXCEPT "%s".
''' % (' '.join(left), ' '.join(right))
            raise CompmakeSyntaxError(msg)

        left = eval_ops(ops=left, context=context, cq=cq)
        right = set(eval_ops(ops=right, context=context, cq=cq))
        for x in left:
            if x not in right:
                yield x

    elif Operators.NOT in ops:
        left, right = list_split(ops, ops.index(Operators.NOT))
        if left or not right:  # forbid left, require right
            msg = '''
NOT requires only a right argument.
Interpreting "%s" NOT "%s".
''' % (' '.join(left), ' '.join(right))
            raise CompmakeSyntaxError(msg)

        right_res = set(eval_ops(ops=right, context=context, cq=cq))
        for x in cq.all_jobs():
            if x not in right_res:
                yield x
    else:
        # no operators: simple list
        # cannot do this anymore, now it's a generator:
        # assert_list_of_strings(ops)
        for x in expand_job_list_tokens(ops, context=context, cq=cq):
            yield x
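# Plain-set restatement of the operator semantics above (illustrative only):
# tokens "a b INTERSECTION b c"  ->  {a, b} & {b, c}  ==  {b}
# tokens "a b EXCEPT b"          ->  {a, b} - {b}     ==  {a}
# tokens "NOT b"                 ->  all_jobs() - {b}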
def comp_(context, command_, *args, **kwargs):
    """
        Main method to define a computation step.

        Extra arguments:

        :arg:job_id: sets the job id (respects job_prefix)
        :arg:extra_dep: extra dependencies (not passed as arguments)
        :arg:command_name: used to define job name if job_id not provided.
        If not given, command_.__name__ is used.

        :arg:needs_context: if this is a dynamic job

        Raises UserError if command is not pickable.
    """
    db = context.get_compmake_db()

    command = command_

    if hasattr(command, '__module__') and command.__module__ == '__main__':
        if not command in WarningStorage.warned:
            if WarningStorage.warned:
                # already warned for another function
                msg = '(Same warning for function %r.)' % command.__name__
            else:
                msg = "A warning about the function %r: " % command.__name__
                msg += (
                    "This function is defined directly in the __main__ "
                    "module, which means that it cannot be pickled correctly "
                    "due to a limitation of Python and 'make new_process=1' "
                    "will fail. For best results, please define functions "
                    "in external modules. For more info, read "
                    "http://stefaanlippens.net/pickleproblem "
                    "and the bug report http://bugs.python.org/issue5509.")
            warning(msg)
            WarningStorage.warned.add(command)

    if get_compmake_status() == CompmakeConstants.compmake_status_slave:
        return None

    # Check that this is a pickable function
    try:
        try_pickling(command)
    except Exception as e:
        msg = ('Cannot pickle function. Make sure it is not a lambda '
               'function or a nested function. (This is a limitation of '
               'Python)')
        raise_wrapped(UserError, e, msg, command=command)

    if CompmakeConstants.command_name_key in kwargs:
        command_desc = kwargs.pop(CompmakeConstants.command_name_key)
    elif hasattr(command, '__name__'):
        command_desc = command.__name__
    else:
        command_desc = type(command).__name__

    args = list(args)  # args is a non iterable tuple

    # Get job id from arguments
    if CompmakeConstants.job_id_key in kwargs:
        # make sure that command does not have itself a job_id key
        try:
            argspec = inspect.getargspec(command)
        except TypeError:
            # Assume Cython function
            # XXX: write test
            pass
        else:
            if CompmakeConstants.job_id_key in argspec.args:
                msg = ("You cannot define the job id in this way because %r "
                       "is already a parameter of this function."
                       % CompmakeConstants.job_id_key)
                raise UserError(msg)

        job_id = kwargs[CompmakeConstants.job_id_key]
        check_isinstance(job_id, six.string_types)
        if ' ' in job_id:
            msg = 'Invalid job id: %r' % job_id
            raise UserError(msg)

        job_prefix = context.get_comp_prefix()
        if job_prefix:
            job_id = '%s-%s' % (job_prefix, job_id)

        del kwargs[CompmakeConstants.job_id_key]

        if context.was_job_defined_in_this_session(job_id):
            # unless it is dynamically generated
            if not job_exists(job_id, db=db):
                msg = ('The job %r was defined but not found in DB. '
                       'I will let it slide.' % job_id)
                print(msg)
            else:
                msg = 'The job %r was already defined in this session.' % job_id
                old_job = get_job(job_id, db=db)
                msg += '\n old_job.defined_by: %s ' % old_job.defined_by
                msg += '\n context.currently_executing: %s ' % context.currently_executing
                msg += (' others defined in session: %s'
                        % context.get_jobs_defined_in_this_session())
                print(msg)
                msg = 'Job %r already defined.' % job_id
                raise UserError(msg)
        else:
            if job_exists(job_id, db=db):
                # ok, you gave us a job_id, but we still need to check whether
                # it is the same job
                stack = context.currently_executing
                defined_by = get_job(job_id, db=db).defined_by
                if defined_by == stack:
                    # this is the same job - redefining
                    pass
                else:
                    for i in range(1000):  # XXX
                        n = '%s-%d' % (job_id, i)
                        if not job_exists(n, db=db):
                            job_id = n
                            break

                    if False:
                        print('The job_id %r was given explicitly but already '
                              'defined.' % job_id)
                        print('current stack: %s' % stack)
                        print('    its stack: %s' % defined_by)
                        print('New job_id is %s' % job_id)
    else:
        job_id = generate_job_id(command_desc, context=context)

    context.add_job_defined_in_this_session(job_id)

    # could be done better
    if 'needs_context' in kwargs:
        needs_context = True
        del kwargs['needs_context']
    else:
        needs_context = False

    if CompmakeConstants.extra_dep_key in kwargs:
        extra_dep = kwargs[CompmakeConstants.extra_dep_key]
        del kwargs[CompmakeConstants.extra_dep_key]

        if not isinstance(extra_dep, (list, Promise)):
            msg = ('The "extra_dep" argument must be a list of promises; '
                   'got: %s' % describe_value(extra_dep))
            raise ValueError(msg)
        if isinstance(extra_dep, Promise):
            extra_dep = [extra_dep]
        assert isinstance(extra_dep, list)
        for ed in extra_dep:
            if not isinstance(ed, Promise):
                msg = ('The "extra_dep" argument must be a list of promises; '
                       'got: %s' % describe_value(extra_dep))
                raise ValueError(msg)
        extra_dep = collect_dependencies(extra_dep)
    else:
        extra_dep = set()

    children = collect_dependencies([args, kwargs])
    children.update(extra_dep)

    for c in children:
        if not job_exists(c, db):
            msg = "Job %r references a job %r that doesn't exist." % (job_id, c)
            raise ValueError(msg)

    all_args = (command, args, kwargs)

    assert len(context.currently_executing) >= 1
    assert context.currently_executing[0] == 'root'

    c = Job(job_id=job_id,
            children=children,
            command_desc=command_desc,
            needs_context=needs_context,
            defined_by=context.currently_executing)

    # Need to inherit the pickle
    if context.currently_executing[-1] != 'root':
        parent_job = get_job(context.currently_executing[-1], db)
        c.pickle_main_context = parent_job.pickle_main_context

    if job_exists(job_id, db):
        old_job = get_job(job_id, db)

        if old_job.defined_by != c.defined_by:
            warning('Redefinition of %s: ' % job_id)
            warning(' cur defined_by: %s' % c.defined_by)
            warning(' old defined_by: %s' % old_job.defined_by)

        if old_job.children != c.children:
            # fixing this
            for x, deps in old_job.dynamic_children.items():
                if not x in c.children:
                    # not a child any more
                    # FIXME: ok but note it might be a dependence of a child
                    pass
                c.dynamic_children[x] = deps
                for j in deps:
                    if not j in c.children:
                        c.children.add(j)

        if old_job.parents != c.parents:
            for p in old_job.parents:
                c.parents.add(p)

        # TODO: preserve defines

    for child in children:
        db_job_add_parent_relation(child=child, parent=job_id, db=db)

    if get_compmake_config('check_params') and job_exists(job_id, db):
        # OK, this is going to be black magic.
        # We want to load the previous job definition,
        # however, by unpickling(), it will start
        # __import__()ing the modules, perhaps
        # even the one that is calling us.
        # What happens, then, is that it will try to
        # add another time this computation recursively.
        # What we do is that we temporarily switch to
        # slave mode, so that recursive calls to comp()
        # are disabled.
        # old_status = get_compmake_status()
        # set_compmake_status(CompmakeConstants.compmake_status_slave)
        all_args_old = get_job_args(job_id, db=db)
        # set_compmake_status(old_status)
        same, reason = same_computation(all_args, all_args_old)

        if not same:
            # print('different job, cleaning cache:\n%s ' % reason)
            from compmake.jobs.actions import clean_targets
            clean_targets([job_id], db)
            # if job_cache_exists(job_id, db):
            #     delete_job_cache(job_id, db)
            publish(context, 'job-redefined', job_id=job_id, reason=reason)
        else:
            # print('ok, same job')
            pass
        # XXX TODO clean the cache

    set_job_args(job_id, all_args, db=db)
    set_job(job_id, c, db=db)
    publish(context, 'job-defined', job_id=job_id)
    return Promise(job_id)
def write_line_endl(x):
    check_isinstance(x, six.text_type)
    stream.buffer.write(x.encode('utf8'))
    stream.buffer.write(b'\n')
    stream.flush()
def __init__(self, nid, data, cols=None, rows=None, fmt=None, caption=None):
    """
    :type data: (array[R](fields[C]) | array[RxC] | list[R](list[C])), R>0, C>0
    :type cols: None|list[C](str)
    :type rows: None|list[R](str)
    :type caption: None|str
    """
    if fmt is None:
        fmt = "%s"
    self.fmt = fmt
    Node.__init__(self, nid)

    check_isinstance(data, (list, np.ndarray))
    if isinstance(data, list):
        # check minimum length
        if len(data) == 0:
            raise ValueError("Expected at least one row")
        # check that all of them are lists of the same length
        for row in data:
            check_isinstance(row, list)
            if not len(row) == len(data[0]):
                msg = ("I want all rows to be the same length."
                       " Got %s != %s." % (len(row), len(data[0])))
                raise ValueError(msg)

        nrows = len(data)
        ncols = len(data[0])
        if ncols == 0:
            raise ValueError("At least one column expected")

        if cols is None:
            cols = [""] * ncols
        if rows is None:
            rows = [""] * nrows

    elif isinstance(data, np.ndarray):
        if not data.ndim in [1, 2]:
            msg = "Expected array of 1D or 2D shape, got %s." % describe_value(data)
            raise ValueError(msg)

        if data.ndim == 1:
            # use field names if desc not provided
            if cols is None:  # and data.dtype.fields is not None:
                cols = list(data.dtype.fields)

            nrows = len(data)
            if rows is None:
                rows = [""] * nrows

            lol = []
            for row in data:
                lol.append(list(row))
            data = lol

        elif data.ndim == 2:
            if data.dtype.fields is not None:
                msg = ("Cannot convert ndarray to table using "
                       "the heuristics that I know (received: %s). "
                       % describe_value(data))
                raise ValueError(msg)

            nrows = data.shape[0]
            ncols = data.shape[1]
            if rows is None:
                rows = [""] * nrows
            if cols is None:
                cols = [""] * ncols
            data = data.tolist()
    else:
        assert False

    self.data = data
    self.cols = cols
    self.rows = rows
    self.caption = caption
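# A construction sketch for this node (assuming the class is the report
# framework's Table; the name and the values are illustrative assumptions):
# t = Table('stats', data=[[1, 2], [3, 4]], cols=['a', 'b'], rows=['r0', 'r1'])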
def mvac_job(args):
    """
    args = tuple job_id, context, queue_name, show_events

    Returns a dictionary with fields "user_object", "new_jobs",
    "delete_jobs". "user_object" is set to None because we do not want
    to load it in our thread if not necessary. Sometimes it is necessary
    because it might contain a Promise.
    """
    job_id, context, event_queue_name, show_output, volumes, cwd = args  # @UnusedVariable
    check_isinstance(job_id, str)
    check_isinstance(event_queue_name, str)

    # Disable multyvac logging
    disable_logging_if_config(context)

    db = context.get_compmake_db()
    job = get_job(job_id=job_id, db=db)

    if job.needs_context:
        msg = 'Cannot use multyvac for dynamic job.'
        raise CompmakeException(msg)

    time_start = time.time()

    multyvac_job = mvac_instance(db, job_id, volumes, cwd)
    multyvac_job.wait()

    errors = [multyvac_job.status_error, multyvac_job.status_killed]
    if multyvac_job.status in errors:
        e = 'Multyvac error (status: %r)' % multyvac_job.status
        bt = str(multyvac_job.stderr)

        cache = Cache(Cache.FAILED)
        cache.exception = e
        cache.backtrace = bt
        cache.timestamp = time.time()
        cache.captured_stderr = str(multyvac_job.stderr)
        cache.captured_stdout = str(multyvac_job.stdout)
        set_job_cache(job_id, cache, db=db)

        raise JobFailed(job_id=job_id, reason=str(e), bt=bt)

    user_object = multyvac_job.result
    user_object_deps = collect_dependencies(user_object)
    set_job_userobject(job_id, user_object, db=db)

    cache = get_job_cache(job_id, db=db)
    cache.captured_stderr = str(multyvac_job.stderr)
    cache.captured_stdout = str(multyvac_job.stdout)
    cache.state = Cache.DONE
    cache.timestamp = time.time()
    walltime = cache.timestamp - time_start
    cache.walltime_used = walltime
    cache.cputime_used = multyvac_job.cputime_system
    cache.host = 'multyvac'
    cache.jobs_defined = set()
    set_job_cache(job_id, cache, db=db)

    result_dict = dict(user_object=user_object,
                       user_object_deps=user_object_deps,
                       new_jobs=[], deleted_jobs=[])
    result_dict_check(result_dict)
    return result_dict