Example #1
def identify_main():
    usage = None
    parser = argparse.ArgumentParser(usage=usage)

    parser.add_argument('--image', default=None)

    parser.add_argument('--command', default=None)

    parsed = parser.parse_args()

    image = parsed.image
    if image is not None:
        ni: NodeInfo = identify_image2(image)
    elif parsed.command is not None:
        command = parsed.command.split()
        ni: NodeInfo = identify_command(command)
    else:
        msg = 'Please specify either --image or --command'
        logger.error(msg)
        sys.exit(1)

    print('\n\n')
    print(indent(describe_nd(ni.nd), '', 'desc: '))
    print('\n\n')
    print(indent(describe_bd(ni.bd), '', 'build: '))
    print('\n\n')
    print(indent(describe_cd(ni.cd), '', 'config: '))
    print('\n\n')
    print(indent(describe(ni.pd.data), '', 'data: '))
    print('\n\n')
    print(indent(describe(ni.pd.meta), '', 'meta: '))
def check_compatible_protocol(p1: InteractionProtocol, p2: InteractionProtocol):
    """ Checks that p1 is a subprotocol of p2 """
    try:
        # check input compatibility
        # we should have all inputs
        for k, v2 in p2.inputs.items():

            if not k in p1.inputs:
                msg = f'First protocol misses input "{k}".'
                raise IncompatibleProtocol(msg)
            v1 = p1.inputs[k]
            can, why = can_be_used_as(v1, v2)
            if not can:
                msg = f'For input "{k}", cannot use type {v1} as {v2}: {why}'
                raise IncompatibleProtocol(msg)

        # check output compatibility
        # we should have all outputs
        for k, v2 in p2.outputs.items():
            if not k in p1.outputs:
                msg = f'First protocol misses output "{k}".'
                raise IncompatibleProtocol(msg)
            v1 = p1.outputs[k]
            can, why = can_be_used_as(v1, v2)
            if not can:
                msg = f'For output "{k}", cannot use type {v1} as {v2}: {why}'
                raise IncompatibleProtocol(msg)
            # XXX: to finish
    except IncompatibleProtocol as e:
        msg = 'Cannot say that p1 is a sub-protocol of p2'
        msg += '\n' + indent(p1, '|', 'p1: |')
        msg += '\n' + indent(p2, '|', 'p2: |')
        raise IncompatibleProtocol(msg) from e
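All of the examples on this page build multi-line error messages with an indent() helper. The snippet below is a minimal sketch of such a helper, assuming the signature indent(text, prefix, first=None) implied by the calls above; the actual implementation in these projects may differ in details such as prefix padding.

def indent(s, prefix, first=None):
    # Minimal sketch (assumption): prefix every line of s with `prefix`,
    # using `first` for the first line when it is given.
    s = str(s)
    lines = s.split('\n')
    if first is None:
        first = prefix
    # Pad the shorter prefix so continuation lines line up under the first line.
    width = max(len(prefix), len(first))
    prefix = prefix.ljust(width)
    first = first.ljust(width)
    return '\n'.join([first + lines[0]] + [prefix + line for line in lines[1:]])

For example, indent('a\nb', 'stderr| ') returns 'stderr| a\nstderr| b', and indent(text, '|', 'p1: |') labels the first line with 'p1:' while the remaining lines carry only the bar prefix.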
Example #3
def run_example(name, command, expect_fail=False):
    examples = get_examples_path()
    pyfile = os.path.join(examples, '%s.py' % name)
    if not os.path.exists(pyfile):
        msg = 'Example file does not exist: %s' % pyfile
        raise Exception(msg)

    with create_tmp_dir() as cwd:
        cmd = [pyfile, command]
        try:
            res = system_cmd_result(cwd, cmd, 
                              display_stdout=False,
                              display_stderr=False,
                              raise_on_error=True)
            if expect_fail:
                msg = 'Expected failure of %s but everything OK.' % name
                msg += '\n cwd = %s'  % cwd 
                msg += '\n' + indent(res.stderr, 'stderr| ')
                msg += '\n' + indent(res.stdout, 'stdout| ')
                raise Exception(msg)
            return res
        except CmdException as e:
            stderr = e.res.stderr
            stdout = e.res.stdout
            if not expect_fail:
                msg = ('Example %r: Command %r failed unexpectedly.' % 
                       (name, command))
                msg += '\n retcode: %r' % e.res.ret
                msg += '\n' + indent(stderr, 'stderr| ')
                msg += '\n' + indent(stdout, 'stdout| ')
                raise Exception(msg)
def run_example(name, command, expect_fail=False):
    examples = get_examples_path()
    pyfile = os.path.join(examples, '%s.py' % name)
    if not os.path.exists(pyfile):
        msg = 'Example file does not exist: %s' % pyfile
        raise Exception(msg)

    with create_tmp_dir() as cwd:
        cmd = [pyfile, command]
        try:
            res = system_cmd_result(cwd,
                                    cmd,
                                    display_stdout=False,
                                    display_stderr=False,
                                    raise_on_error=True)
            if expect_fail:
                msg = 'Expected failure of %s but everything OK.' % name
                msg += '\n cwd = %s' % cwd
                msg += '\n' + indent(res.stderr, 'stderr| ')
                msg += '\n' + indent(res.stdout, 'stdout| ')
                raise Exception(msg)
            return res
        except CmdException as e:
            stderr = e.res.stderr
            stdout = e.res.stdout
            if not expect_fail:
                msg = ('Example %r: Command %r failed unexpectedly.' %
                       (name, command))
                msg += '\n retcode: %r' % e.res.ret
                msg += '\n' + indent(stderr, 'stderr| ')
                msg += '\n' + indent(stdout, 'stdout| ')
                raise Exception(msg)
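A hedged usage sketch of run_example(); the example names 'minimal' and 'broken' below are illustrative and assume matching .py files under the directory returned by get_examples_path().

res = run_example('minimal', '--help')            # expected to succeed; returns the command result
run_example('broken', 'run', expect_fail=True)    # expected to fail; otherwise an Exception is raised
print(res.stdout)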
Example #5
    def call_recursive(self,
                       context,
                       child_name,
                       cmd_class,
                       args,
                       extra_dep: List = None,
                       add_outdir=None,
                       add_job_prefix=None,
                       separate_resource_manager=False,
                       separate_report_manager=False,
                       extra_report_keys=None):
        if extra_dep is None:
            extra_dep = []
        instance = cmd_class()
        instance.set_parent(self)
        is_quickapp = isinstance(instance, QuickApp)

        try:
            # we are already in a context; just define jobs
            child_context = context.child(
                qapp=instance,
                name=child_name,
                extra_dep=extra_dep,
                add_outdir=add_outdir,
                extra_report_keys=extra_report_keys,
                separate_resource_manager=separate_resource_manager,
                separate_report_manager=separate_report_manager,
                add_job_prefix=add_job_prefix)  # XXX

            if isinstance(args, list):
                instance.set_options_from_args(args)
            elif isinstance(args, dict):
                instance.set_options_from_dict(args)
            else:
                assert False

            if not is_quickapp:
                self.child_context = child_context
                res = instance.go()
            else:
                instance.context = child_context
                res = instance.define_jobs_context(child_context)

            # Add his jobs to our list of jobs
            context._jobs.update(child_context.all_jobs_dict())
            return res

        except Exception as e:
            msg = 'While trying to run  %s\n' % cmd_class.__name__
            msg += 'with arguments = %s\n' % args
            if '_options' in instance.__dict__:
                msg += ' parsed options: %s\n' % instance.get_options()
                msg += ' params: %s\n' % instance.get_options().get_params()
            if isinstance(e, QuickAppException):
                msg += indent(str(e), '> ')
            else:
                msg += indent(traceback.format_exc(), '> ')
            raise QuickAppException(msg)
def parmake_job2_new_process(args):
    """ Starts the job in a new compmake process. """
    (job_id, context) = args
    compmake_bin = which("compmake")

    db = context.get_compmake_db()
    storage = db.basepath  # XXX:
    where = os.path.join(storage, "parmake_job2_new_process")
    if not os.path.exists(storage):
        try:
            os.makedirs(storage)
        except:
            pass

    out_result = os.path.join(where, "%s.results.pickle" % job_id)
    out_result = os.path.abspath(out_result)
    cmd = [compmake_bin, storage]

    if not all_disabled():
        cmd += ["--contracts"]

    cmd += [
        "--status_line_enabled",
        "0",
        "--colorize",
        "0",
        "-c",
        "make_single out_result=%s %s" % (out_result, job_id),
    ]

    cwd = os.getcwd()
    cmd_res = system_cmd_result(
        cwd, cmd, display_stdout=False, display_stderr=False, raise_on_error=False, capture_keyboard_interrupt=False
    )
    ret = cmd_res.ret

    if ret == CompmakeConstants.RET_CODE_JOB_FAILED:  # XXX:
        msg = "Job %r failed in external process" % job_id
        msg += indent(cmd_res.stdout, "stdout| ")
        msg += indent(cmd_res.stderr, "stderr| ")

        res = safe_pickle_load(out_result)
        os.unlink(out_result)
        result_dict_check(res)

        raise JobFailed.from_dict(res)

    elif ret != 0:
        msg = "Host failed while doing %r" % job_id
        msg += "\n cmd: %s" % " ".join(cmd)
        msg += "\n" + indent(cmd_res.stdout, "stdout| ")
        msg += "\n" + indent(cmd_res.stderr, "stderr| ")
        raise CompmakeBug(msg)  # XXX:

    res = safe_pickle_load(out_result)
    os.unlink(out_result)
    result_dict_check(res)
    return res
Example #7
    def get(self, timeout=0):  # @UnusedVariable
        if not self.told_you_ready:
            raise CompmakeBug("I didn't tell you it was ready.")
        if self.already_read:
            msg = 'Compmake BUG: should not call twice.'
            raise CompmakeBug(msg)
        self.already_read = True

        assert os.path.exists(self.retcode)
        ret_str = open(self.retcode, 'r').read()
        try:
            ret = int(ret_str)
        except ValueError:
            msg = 'Could not interpret file %r: %r.' % (self.retcode, ret_str)
            raise HostFailed(host='localhost',
                             job_id=self.job_id,
                             reason=msg,
                             bt='')
            #
        #
        #         raise HostFailed(host="xxx",
        #                                      job_id=self.job_id,
        # reason=reason, bt="")  # XXX
        #

        try:
            stderr = open(self.stderr, 'r').read()
            stdout = open(self.stdout, 'r').read()

            stderr = 'Contents of %s:\n' % self.stderr + stderr
            stdout = 'Contents of %s:\n' % self.stdout + stdout

            # if ret == CompmakeConstants.RET_CODE_JOB_FAILED:
            #                 msg = 'SGE Job failed (ret: %s)\n' % ret
            #                 msg += indent(stderr, '| ')
            #                 # mark_as_failed(self.job_id, msg, None)
            #                 raise JobFailed(msg)
            #             elif ret != 0:
            #                 msg = 'SGE Job failed (ret: %s)\n' % ret
            #                 error(msg)
            #                 msg += indent(stderr, '| ')
            #                 raise JobFailed(msg)

            if not os.path.exists(self.out_results):
                msg = 'job succeeded but no %r found' % self.out_results
                msg += '\n' + indent(stderr, 'stderr')
                msg += '\n' + indent(stdout, 'stdout')
                raise CompmakeBug(msg)

            res = safe_pickle_load(self.out_results)
            result_dict_raise_if_error(res)
            return res
        finally:
            fs = [self.stderr, self.stdout, self.out_results, self.retcode]
            for filename in fs:
                if os.path.exists(filename):
                    os.unlink(filename)
Example #8
def get_object_tree(
    po: PlacedObject,
    levels: int = 100,
    spatial_relations: bool = False,
    attributes: bool = False,
) -> str:
    ss = []
    ss.append("%s" % type(po).__name__)
    d = po.params_to_json_dict()
    d.pop("children", None)
    d.pop("spatial_relations", None)

    if attributes:
        if d:
            ds = yaml.safe_dump(
                d,
                encoding="utf-8",
                indent=4,
                allow_unicode=True,
                default_flow_style=False,
            )
            if isinstance(ds, bytes):
                ds = ds.decode("utf-8")
            ss.append("\n" + indent(ds, " "))

    if po.children and levels >= 1:
        ss.append("")
        N = len(po.children)
        for i, (child_name, child) in enumerate(po.children.items()):

            if i != N - 1:
                prefix1 = u"├ %s ┐ " % child_name
                prefix2 = u"│ %s │ " % (" " * len(child_name))
            else:
                prefix1 = u"└ %s ┐ " % child_name
                prefix2 = u"  %s │ " % (" " * len(child_name))
            c = get_object_tree(
                child,
                attributes=attributes,
                spatial_relations=spatial_relations,
                levels=levels - 1,
            )
            sc = indent(c, prefix2, prefix1)
            n = max(len(_) for _ in sc.split("\n"))
            sc += "\n" + prefix2[:-2] + u"└" + u"─" * (n - len(prefix2) + 3)
            ss.append(sc)

    if spatial_relations:
        if po.spatial_relations and levels >= 1:
            ss.append("")
            for r_name, rel in po.spatial_relations.items():
                ss.append(
                    '- from "%s" to "%s"  %s ' %
                    (url_from_fqn(rel.a), url_from_fqn(rel.b), rel.transform))

    return "\n".join(ss)
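A minimal sketch of exercising get_object_tree() with a stand-in object; the real PlacedObject comes from duckietown-world and has a richer interface, so the _FakePO class below is purely illustrative.

class _FakePO:
    # Stand-in exposing only the attributes that get_object_tree() reads.
    def __init__(self, children=None):
        self.children = children or {}
        self.spatial_relations = {}

    def params_to_json_dict(self):
        return {}

root = _FakePO(children={'tile': _FakePO(), 'robot': _FakePO()})
print(get_object_tree(root, levels=2))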
Example #9
    def get(self, timeout=0):  # @UnusedVariable
        if not self.told_you_ready:
            raise CompmakeBug("I didn't tell you it was ready.")
        if self.already_read:
            msg = 'Compmake BUG: should not call twice.'
            raise CompmakeBug(msg)
        self.already_read = True

        assert os.path.exists(self.retcode)
        ret_str = open(self.retcode, 'r').read()
        try:
            ret = int(ret_str)
        except ValueError:
            msg = 'Could not interpret file %r: %r.' % (self.retcode, ret_str)
            raise HostFailed(host='localhost',
                             job_id=self.job_id, reason=msg, bt='')
            #
        #
        #         raise HostFailed(host="xxx",
        #                                      job_id=self.job_id,
        # reason=reason, bt="")  # XXX
        #

        try:
            stderr = open(self.stderr, 'r').read()
            stdout = open(self.stdout, 'r').read()

            stderr = 'Contents of %s:\n' % self.stderr + stderr
            stdout = 'Contents of %s:\n' % self.stdout + stdout

            # if ret == CompmakeConstants.RET_CODE_JOB_FAILED:
            #                 msg = 'SGE Job failed (ret: %s)\n' % ret
            #                 msg += indent(stderr, '| ')
            #                 # mark_as_failed(self.job_id, msg, None)
            #                 raise JobFailed(msg)
            #             elif ret != 0:
            #                 msg = 'SGE Job failed (ret: %s)\n' % ret
            #                 error(msg)
            #                 msg += indent(stderr, '| ')
            #                 raise JobFailed(msg)

            if not os.path.exists(self.out_results):
                msg = 'job succeeded but no %r found' % self.out_results
                msg += '\n' + indent(stderr, 'stderr')
                msg += '\n' + indent(stdout, 'stdout')
                raise CompmakeBug(msg)

            res = safe_pickle_load(self.out_results)
            result_dict_raise_if_error(res)
            return res
        finally:
            fs = [self.stderr, self.stdout, self.out_results, self.retcode]
            for filename in fs:
                if os.path.exists(filename):
                    os.unlink(filename)
Example #10
def format_example(e):
    print('Filename: %s' % e.filename)
    text = ''
    # text += '<h2>Example <tt>%s</tt></h2>\n\n' % e.shortname

    text += e.description
    text += '\n\n'
    # text += "MCDPL Code:"
    code = e.mcdp

    # code = code.replace('mcdp', '<strong>mcdp</strong>')
    text += '\n\n'
    try:
        text += ast_to_html(code)
    except BaseException as err:
        text += '<pre>%s</pre>' % err

    if False:
        text += '<pre class="mcdp"><code class="mcdp">\n'

        def escape(x):
            x = x.replace('<', '&lt;')
            return x

        text += indent(higlight_chunk(escape(code)), ' ' * 4)

        text += '\n</code></pre>\n'
        text += '\n\n'

    # plots = ['ndp_clean']
    plots = ['ndp_default']
    try:
        outputs = do_plots(e.filename, plots, outdir, extra_params="")
    except DPSemanticError as e:
        text += '\n\n<code class="DPSemanticError">%s</code>\n\n' % indent(
            str(e), ' ' * 4)
    except DPInternalError as e:
        text += '\n\n<code class="DPInternalError">%s</code>\n\n' % indent(
            str(e), ' ' * 4)
    else:

        for ext, name, data in outputs:
            out = os.path.splitext(e.filename)[0] + '-%s.%s' % (name, ext)
            with open(out, 'w') as f:
                f.write(data)
            if ext in ['png']:
                imgurl = os.path.relpath(out, os.path.dirname(e.filename))
                print('url: %r' % imgurl)
                text += '\n <img class="output" src="%s"/> \n\n' % (imgurl)

    text += '\n\n'

    return text
Example #11
    def call_recursive(self, context, child_name, cmd_class, args,
                       extra_dep=[],
                       add_outdir=None,
                       add_job_prefix=None,
                       separate_resource_manager=False,
                       separate_report_manager=False,
                       extra_report_keys=None):     
        instance = cmd_class()
        instance.set_parent(self)
        is_quickapp = isinstance(instance, QuickApp) 

        try:
            # we are already in a context; just define jobs
            child_context = context.child(qapp=instance, name=child_name,
                                          extra_dep=extra_dep,
                                          add_outdir=add_outdir,
                                          extra_report_keys=extra_report_keys,
                                          separate_resource_manager=separate_resource_manager,
                                          separate_report_manager=separate_report_manager,
                                          add_job_prefix=add_job_prefix)  # XXX
        
            if isinstance(args, list):
                instance.set_options_from_args(args)
            elif isinstance(args, dict):
                instance.set_options_from_dict(args)
            else:
                assert False
            
            if not is_quickapp:
                self.child_context = child_context
                res = instance.go()  
            else:
                instance.context = child_context
                res = instance.define_jobs_context(child_context)                
                
            # Add his jobs to our list of jobs
            context._jobs.update(child_context.all_jobs_dict()) 
            return res
        
        except Exception as e:
            msg = 'While trying to run  %s\n' % cmd_class.__name__
            msg += 'with arguments = %s\n' % args
            if '_options' in instance.__dict__:
                msg += ' parsed options: %s\n' % instance.get_options()
                msg += ' params: %s\n' % instance.get_options().get_params()
            if isinstance(e, QuickAppException):
                msg += indent(str(e), '> ')
            else:
                msg += indent(traceback.format_exc(), '> ')
            raise QuickAppException(msg)
Example #12
def format_example(e):
	print('Filename: %s' % e.filename)
	text = ''
	# text += '<h2>Example <tt>%s</tt></h2>\n\n' % e.shortname

	text += e.description
	text += '\n\n'
	# text += "MCDPL Code:"
	code =   e.mcdp

	# code = code.replace('mcdp', '<strong>mcdp</strong>')
	text += '\n\n'
	try:
		text += ast_to_html(code, complete_document=False)
	except BaseException as err:
		text += '<pre>%s</pre>' % err

	if False:
		text += '<pre class="mcdp"><code class="mcdp">\n'
		def escape(x):
			x = x.replace('<', '&lt;')
			return x
		
		text += indent(higlight_chunk(escape(code)), ' '*4)

		text += '\n</code></pre>\n'
		text += '\n\n'

	# plots = ['ndp_clean']
	plots = ['ndp_default']
	try:
		outputs = do_plots(e.filename, plots, outdir, extra_params="")
	except DPSemanticError as e:
		text += '\n\n<code class="DPSemanticError">%s</code>\n\n' % indent(str(e), ' ' * 4)
	except DPInternalError as e:
		text += '\n\n<code class="DPInternalError">%s</code>\n\n' % indent(str(e), ' ' * 4)
	else:

		for ext, name, data in outputs:
			out = os.path.splitext(e.filename)[0] +'-%s.%s' % (name, ext)
			with open(out, 'w') as f:
				f.write(data)
			if ext in ['png'] :
				imgurl =  os.path.relpath(out, os.path.dirname(e.filename)) 
				print('url: %r' % imgurl)
				text += '\n <img class="output" src="%s"/> \n\n' % (imgurl)

	text += '\n\n'

	return text
Example #13
def get_object_tree(po: PlacedObject,
                    levels: int = 100,
                    spatial_relations: bool = False,
                    attributes: bool = False) -> str:
    ss = []
    ss.append('%s' % type(po).__name__)
    d = po.params_to_json_dict()
    d.pop('children', None)
    d.pop('spatial_relations', None)

    if attributes:
        if d:
            ds = yaml.safe_dump(d,
                                encoding='utf-8',
                                indent=4,
                                allow_unicode=True,
                                default_flow_style=False)
            if isinstance(ds, bytes):
                ds = ds.decode('utf-8')
            ss.append('\n' + indent(ds, ' '))

    if po.children and levels >= 1:
        ss.append('')
        N = len(po.children)
        for i, (child_name, child) in enumerate(po.children.items()):

            if i != N - 1:
                prefix1 = u'├ %s ┐ ' % child_name
                prefix2 = u'│ %s │ ' % (' ' * len(child_name))
            else:
                prefix1 = u'└ %s ┐ ' % child_name
                prefix2 = u'  %s │ ' % (' ' * len(child_name))
            c = get_object_tree(child,
                                attributes=attributes,
                                spatial_relations=spatial_relations,
                                levels=levels - 1)
            sc = indent(c, prefix2, prefix1)
            n = max(len(_) for _ in sc.split('\n'))
            sc += '\n' + prefix2[:-2] + u'└' + u'─' * (n - len(prefix2) + 3)
            ss.append(sc)

    if spatial_relations:
        if po.spatial_relations and levels >= 1:
            ss.append('')
            for r_name, rel in po.spatial_relations.items():
                ss.append(
                    '- from "%s" to "%s"  %s ' %
                    (url_from_fqn(rel.a), url_from_fqn(rel.b), rel.transform))

    return "\n".join(ss)
Example #14
    def read_one(self, expect_topic=None, timeout=None) -> MsgReceived:
        try:
            if expect_topic:
                waiting_for = f'Expecting topic "{expect_topic}" << {self.nickname}.'
            else:
                waiting_for = None
            msg = read_next_either_json_or_cbor(self.fpout, timeout=timeout, waiting_for=waiting_for)
            # if self._cc:
            #     msg_b = self._serialize(msg)
            #     self._cc.write(msg_b)
            #     self._cc.flush()

            topic = msg['topic']
            if topic == 'aborted':
                m = f'I was waiting for a message from component "{self.nickname}" but it aborted with the following error.'
                m += '\n\n' + indent(msg['data'], '|', f'{self.nickname} error |')
                raise Exception(m)  # XXX

            if expect_topic:
                if topic != expect_topic:
                    msg = f'I expected topic "{expect_topic}" but received "{topic}".'
                    raise Exception(msg)  # XXX
            if topic in basic_protocol.outputs:
                klass = basic_protocol.outputs[topic]
            else:
                if self.node_protocol:
                    if topic not in self.node_protocol.outputs:
                        msg = f'Cannot find topic "{topic}" in outputs of detected node protocol.'
                        msg += '\nI know: %s' % sorted(self.node_protocol.outputs)
                        raise Exception(msg)  # XXX
                    else:
                        klass = self.node_protocol.outputs[topic]
                else:
                    if not topic in self.expect_protocol.outputs:
                        msg = f'Cannot find topic "{topic}".'
                        raise Exception(msg)  # XXX
                    else:
                        klass = self.expect_protocol.outputs[topic]
            data = ipce_to_object(msg['data'], {}, expect_type=klass)

            if self._cc:
                msg['data'] = object_to_ipce(data, {}, with_schema=True)
                msg_b = self._serialize(msg)
                self._cc.write(msg_b)
                self._cc.flush()

            timing = ipce_to_object(msg['timing'], {}, expect_type=TimingInfo)
            self.nreceived += 1
            return MsgReceived[klass](topic, data, timing)

        except StopIteration as e:
            msg = 'EOF detected on %s after %d messages.' % (self.fnout, self.nreceived)
            if expect_topic:
                msg += f' Expected topic "{expect_topic}".'
            raise StopIteration(msg) from e
        except TimeoutError as e:
            msg = 'Timeout detected on %s after %d messages.' % (self.fnout, self.nreceived)
            if expect_topic:
                msg += f' Expected topic "{expect_topic}".'
            raise TimeoutError(msg) from e
Example #15
def broadcast_event(context, event):
    import inspect
    all_handlers = CompmakeGlobalState.EventHandlers.handlers

    handlers = all_handlers.get(event.name, [])
    if handlers:
        for handler in handlers:
            spec = inspect.getargspec(handler)
            try:
                kwargs = {}
                if 'event' in spec.args:
                    kwargs['event'] = event
                if 'context' in spec.args:
                    kwargs['context'] = context
                handler(**kwargs)
                # TODO: do not catch interrupted, etc.
            except Exception as e:
                try:
                    # e = traceback.format_exc(e)
                    msg = [
                        'compmake BUG: Error in event handler.',
                        '  event: %s' % event.name,
                        'handler: %s' % handler,
                        ' kwargs: %s' % list(event.kwargs.keys()),
                        '     bt: ',
                        indent(traceback.format_exc(), '| '),
                    ]
                    msg = "\n".join(msg)
                    CompmakeGlobalState.original_stderr.write(msg)
                except:
                    pass
    else:
        for handler in CompmakeGlobalState.EventHandlers.fallback:
            handler(context=context, event=event)
Example #16
def broadcast_event(context, event):
    import inspect
    all_handlers = CompmakeGlobalState.EventHandlers.handlers

    handlers = all_handlers.get(event.name, [])
    if handlers:
        for handler in handlers:
            spec = inspect.getargspec(handler)
            try:
                kwargs = {}
                if 'event' in spec.args:
                    kwargs['event'] = event
                if 'context' in spec.args:
                    kwargs['context'] = context
                handler(**kwargs)
                # TODO: do not catch interrupted, etc.
            except Exception as e:
                try:
                    # e = traceback.format_exc(e)
                    msg = [
                        'compmake BUG: Error in event handler.',
                        '  event: %s' % event.name,
                        'handler: %s' % handler,
                        ' kwargs: %s' % list(event.kwargs.keys()),
                        '     bt: ',
                        indent(my_format_exc(e), '| '),
                    ]
                    msg = "\n".join(msg)
                    CompmakeGlobalState.original_stderr.write(msg)
                except:
                    pass
    else:
        for handler in CompmakeGlobalState.EventHandlers.fallback:
            handler(context=context, event=event)
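broadcast_event() inspects each handler's signature and passes only the keyword arguments the handler declares. Below is a minimal sketch of a compatible handler; how it gets registered is left out, since that goes through compmake's own API.

def my_handler(event, context):
    # Either parameter may be omitted: only the names found in the handler's
    # signature are actually passed in by broadcast_event().
    print('event %s with kwargs %s' % (event.name, sorted(event.kwargs)))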
Example #17
def check_job_cache_says_failed(job_id, db, e):
    """ Raises CompmakeBug if the job is not marked as failed. """
    if not job_cache_exists(job_id, db):
        msg = ('The job %r was reported as failed but no record of '
               'it was found.' % job_id)
        msg += '\n' + 'JobFailed exception:'
        msg += '\n' + indent(str(e), "| ")
        raise CompmakeBug(msg)
    else:
        cache = get_job_cache(job_id, db)
        if not cache.state == Cache.FAILED:
            msg = ('The job %r was reported as failed but it was '
                   'not marked as such in the DB.' % job_id)
            msg += '\n seen state: %s ' % Cache.state2desc[cache.state]
            msg += '\n' + 'JobFailed exception:'
            msg += '\n' + indent(str(e), "| ")
            raise CompmakeBug(msg)
Example #18
def check_job_cache_says_failed(job_id, db, e):
    """ Raises CompmakeBug if the job is not marked as failed. """
    if not job_cache_exists(job_id, db):
        msg = ('The job %r was reported as failed but no record of '
               'it was found.' % job_id)
        msg += '\n' + 'JobFailed exception:'
        msg += '\n' + indent(str(e), "| ")
        raise CompmakeBug(msg)
    else:
        cache = get_job_cache(job_id, db)
        if not cache.state == Cache.FAILED:
            msg = ('The job %r was reported as failed but it was '
                   'not marked as such in the DB.' % job_id)
            msg += '\n seen state: %s ' % Cache.state2desc[cache.state]
            msg += '\n' + 'JobFailed exception:'
            msg += '\n' + indent(str(e), "| ")
            raise CompmakeBug(msg)
Example #19
def job_failed(context, event):  # @UnusedVariable
    job_id = event.kwargs['job_id']
    reason = event.kwargs['reason']
    bt = event.kwargs['bt']

    msg = 'Job %r failed:' % job_id
    # s = reason.strip
    # if get_compmake_config('echo'):
    #     s += '\n' + bt
    msg += '\n' + indent(reason.strip(), '| ')

    if get_compmake_config('echo'):
        msg += '\n' + indent(bt.strip(), '> ')
    else:
        msg += '\nUse "config echo 1" to have errors displayed.'
    msg += '\nWrite "details %s" to inspect the error.' % job_id
    error(my_prefix + msg)
def job_failed(context, event):  # @UnusedVariable
    job_id = event.kwargs['job_id']
    reason = event.kwargs['reason']
    bt = event.kwargs['bt']

    msg = 'Job %r failed:' % job_id
    # s = reason.strip
    # if get_compmake_config('echo'):
    #     s += '\n' + bt
    msg += '\n' + indent(reason.strip(), '| ')
    
    if get_compmake_config('echo'):
        s = bt.strip()
        msg += '\n' + indent(s, '> ')
    else:
        msg += '\nUse "config echo 1" to have errors displayed.' 
    msg += '\nWrite "details %s" to inspect the error.' % job_id
    error(my_prefix + msg)
def make_server_request(token, endpoint, data=None, method='GET', timeout=3):
    """
        Raise RequestFailed or ConnectionError.

        Returns the result in 'result'.
    """
    server = get_duckietown_server_url()
    url = server + endpoint

    headers = {'X-Messaging-Token': token}
    if data is not None:
        data = json.dumps(data)
    req = urllib2.Request(url, headers=headers, data=data)
    req.get_method = lambda: method
    try:
        res = urllib2.urlopen(req, timeout=timeout)
        data = res.read()
    except urllib2.URLError as e:
        msg = 'Cannot connect to server %s' % url
        raise_wrapped(ConnectionError, e, msg)
        raise

    try:
        result = json.loads(data)
    except ValueError as e:
        msg = 'Cannot read answer from server.'
        msg += '\n\n' + indent(data, '  > ')
        raise_wrapped(ConnectionError, e, msg)
        raise

    if not isinstance(result, dict) or 'ok' not in result:
        msg = 'Server provided invalid JSON response. Expected a dict with "ok" in it.'
        msg += '\n\n' + indent(data, '  > ')
        raise ConnectionError(msg)

    if result['ok']:
        if 'result' not in result:
            msg = 'Server provided invalid JSON response. Expected a field "result".'
            msg += '\n\n' + indent(result, '  > ')
            raise ConnectionError(msg)
        return result['result']
    else:
        msg = 'Failed request for %s:\n%s' % (url, result.get('error', result))
        raise RequestFailed(msg)
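A hedged usage sketch of make_server_request(); the endpoint '/info' and the token value are placeholders, not actual Duckietown server routes or credentials.

token = 'xxx-not-a-real-token'
try:
    info = make_server_request(token, '/info', method='GET', timeout=5)
    print(info)
except (ConnectionError, RequestFailed) as e:
    print('request failed: %s' % e)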
Example #22
    def instance(self, block_type, name, config, library=None):
        from procgraph.core.exceptions import ModelInstantionError, SemanticError
        if library is None:
            library = self
        generator = self.get_generator_for_block_type(block_type)
        try:
            block = generator(name=name, config=config, library=library)
        except Exception as e:
            msg = 'Could not instance block from generator.\n'
            msg += '        name: %s  \n' % name
            msg += '      config: %s  \n' % config
            msg += '   generator: %s  \n' % generator
            msg += '     of type: %s  \n' % describe_type(generator)
            msg += 'Because of this exception:\n'
            if isinstance(e, (SemanticError, ModelInstantionError)):
                msg += indent('%s' % e, '| ')
            else:
                msg += indent('%s\n%s' % (e, traceback.format_exc()), '| ')
            raise ModelInstantionError(msg)  # TODO: use Procgraph own exception

        block.__dict__['generator'] = generator
        return block
Example #23
    def instance(self, block_type, name, config, library=None):
        from procgraph.core.exceptions import ModelInstantionError, SemanticError
        if library is None:
            library = self
        generator = self.get_generator_for_block_type(block_type)
        try:
            block = generator(name=name, config=config, library=library)
        except Exception as e:
            msg = 'Could not instance block from generator.\n'
            msg += '        name: %s  \n' % name
            msg += '      config: %s  \n' % config
            msg += '   generator: %s  \n' % generator
            msg += '     of type: %s  \n' % describe_type(generator)
            msg += 'Because of this exception:\n'
            if isinstance(e, (SemanticError, ModelInstantionError)):
                msg += indent('%s' % e, '| ')
            else:
                msg += indent('%s\n%s' % (e, traceback.format_exc()), '| ')
            raise ModelInstantionError(
                msg)  # TODO: use Procgraph own exception

        block.__dict__['generator'] = generator
        return block
Example #24
def describe(ip: InteractionProtocol):
    s = "InteractionProtocol"

    s += '\n\n' + '* Description:'
    s += '\n\n' + indent(ip.description.strip(), '    ')

    s += '\n\n' + '* Inputs:'
    for name, type_ in ip.inputs.items():
        s += '\n  %25s: %s' % (name, type_)

    s += '\n\n' + '* Outputs:'
    for name, type_ in ip.outputs.items():
        s += '\n  %25s: %s' % (name, type_)

    s += '\n\n' + '* Language:'
    s += '\n\n' + ip.language

    return s
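A minimal sketch of calling describe() with a duck-typed stand-in; the real InteractionProtocol class may have a different constructor, so SimpleNamespace is used here to avoid guessing it.

from types import SimpleNamespace

toy = SimpleNamespace(
    description='Echoes every message it receives.',
    inputs={'in': 'bytes'},
    outputs={'out': 'bytes'},
    language='in:* ; out:*',
)
print(describe(toy))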
Example #25
def interpret_commands_wrap(commands, context, cq):
    """ 
        Returns None or raises CommandFailed, ShellExitRequested, 
            CompmakeBug, KeyboardInterrupt.
    """
    assert context is not None
    publish(context, 'command-line-starting', command=commands)

    try:
        interpret_commands(commands, context=context, cq=cq)
        publish(context, 'command-line-succeeded', command=commands)
    except CompmakeBug:
        raise
    except UserError as e:
        publish(context, 'command-line-failed', command=commands, reason=e)
        raise CommandFailed(str(e))
    except CommandFailed as e:
        publish(context, 'command-line-failed', command=commands, reason=e)
        raise
    except (KeyboardInterrupt, JobInterrupted) as e:
        publish(context,
                'command-line-interrupted',
                command=commands,
                reason='KeyboardInterrupt')
        # If debugging
        # tb = traceback.format_exc()
        # print tb  # XXX
        raise CommandFailed(str(e))
        # raise CommandFailed('Execution of %r interrupted.' % commands)
    except ShellExitRequested:
        raise
    except Exception as e:
        tb = traceback.format_exc()
        msg0 = ('Warning, I got this exception, while it should '
                'have been filtered out already. '
                'This is a compmake BUG that should be reported '
                'at http://github.com/AndreaCensi/compmake/issues')
        msg = msg0 + "\n" + indent(tb, 'bug| ')
        publish(context, 'compmake-bug', user_msg=msg, dev_msg="")  # XXX
        raise_wrapped(CompmakeBug, e, msg)
Example #26
def interpret_commands_wrap(commands, context, cq):
    """ 
        Returns None or raises CommandFailed, ShellExitRequested, 
            CompmakeBug, KeyboardInterrupt.
    """
    assert context is not None
    publish(context, 'command-line-starting', command=commands)

    try:
        interpret_commands(commands, context=context, cq=cq)
        publish(context, 'command-line-succeeded', command=commands)
    except CompmakeBug:
        raise
    except UserError as e:
        publish(context, 'command-line-failed', command=commands, reason=e)
        raise CommandFailed(str(e))
    except CommandFailed as e:
        publish(context, 'command-line-failed', command=commands, reason=e)
        raise
    except (KeyboardInterrupt, JobInterrupted) as e:
        publish(context, 'command-line-interrupted',
                command=commands, reason='KeyboardInterrupt')
        # If debugging
        # tb = traceback.format_exc()
        # print tb  # XXX 
        raise CommandFailed(str(e))
        # raise CommandFailed('Execution of %r interrupted.' % commands)
    except ShellExitRequested:
        raise
    except Exception as e:
        tb = traceback.format_exc()
        msg0 = ('Warning, I got this exception, while it should '
                'have been filtered out already. '
                'This is a compmake BUG that should be reported '
                'at http://github.com/AndreaCensi/compmake/issues')
        msg = msg0 + "\n" + indent(tb, 'bug| ')
        publish(context, 'compmake-bug', user_msg=msg, dev_msg="")  # XXX
        raise_wrapped(CompmakeBug, e, msg0)
Example #27
def identify_command(command) -> NodeInfo:
    d = [{
        'topic': 'wrapper.describe_protocol'
    }, {
        'topic': 'wrapper.describe_config'
    }, {
        'topic': 'wrapper.describe_node'
    }, {
        'topic': 'wrapper.describe_build'
    }]
    to_send = b''
    for p in d:
        p['compat'] = ['aido2']
        to_send += (json.dumps(p) + '\n').encode('utf-8')
    cp = subprocess.run(command, input=to_send, capture_output=True)
    s = cp.stderr.decode('utf-8')

    sys.stderr.write(indent(s.strip(), '|', ' stderr: |') + '\n\n')
    # noinspection PyTypeChecker
    f = BufferedReader(BytesIO(cp.stdout))
    stream = read_cbor_or_json_objects(f)

    res = stream.__next__()
    pd: ProtocolDescription = ipce_to_object(res['data'], {}, {},
                                             expect_type=ProtocolDescription)
    res = stream.__next__()
    cd: ConfigDescription = ipce_to_object(res['data'], {}, {},
                                           expect_type=ConfigDescription)
    res = stream.__next__()
    nd: NodeDescription = ipce_to_object(res['data'], {}, {},
                                         expect_type=NodeDescription)
    res = stream.__next__()
    bd: BuildDescription = ipce_to_object(res['data'], {}, {},
                                          expect_type=BuildDescription)

    return NodeInfo(pd, nd, bd, cd)
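A hedged usage sketch of identify_command(); the command below is a placeholder for an executable that actually implements the wrapper protocol queried above.

ni = identify_command(['python3', 'my_node.py'])  # 'my_node.py' is illustrative
print(describe(ni.pd.data))
print(describe(ni.pd.meta))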
Example #28
    def __str__(self):
        s = 'Host %r failed for %r: %s\n%s' % (self.host, self.job_id,
                                               self.reason,
                                               indent(self.bt, '|'))
        return s
Example #29
def render_complete(library, s, raise_errors, realpath, generate_pdf=False,
                    check_refs=False, use_mathjax=True, filter_soup=None,
                    symbols=None, res=None, location=None,
                    ignore_ref_errors=False):
    """
        Transforms markdown into html and then renders the mcdp snippets inside.

        s: a markdown string with embedded html snippets

        Returns an HTML string; not a complete document.

        filter_soup(library, soup)
    """
    if res is None:
        res = AugmentedResult()
    if location is None:
        location = LocationUnknown()
    from mcdp_report.gg_utils import resolve_references_to_images
    s0 = s

    unique = get_md5(realpath)[:8]
    check_good_use_of_special_paragraphs(s0, res, location)
    raise_missing_image_errors = raise_errors

    # Imports here because of circular dependencies
    from .latex.latex_preprocess import extract_maths, extract_tabular
    from .latex.latex_preprocess import latex_preprocessing
    from .latex.latex_preprocess import replace_equations
    from .macro_col2 import col_macros, col_macros_prepare_before_markdown
    from .mark.markd import render_markdown
    from .preliminary_checks import do_preliminary_checks_and_fixes
    from .prerender_math import prerender_mathjax

    if isinstance(s, unicode):
        msg = 'I expect a str encoded with utf-8, not unicode.'
        raise_desc(TypeError, msg, s=s)

    # need to do this before do_preliminary_checks_and_fixes
    # because of & char
    s, tabulars = extract_tabular(s)

    s = do_preliminary_checks_and_fixes(s, res, location)
    # put back tabular, because extract_maths needs to grab them
    for k, v in tabulars.items():
        assert k in s
        s = s.replace(k, v)

    # copy all math content,
    #  between $$ and $$
    #  between various limiters etc.
    # returns a dict(string, substitution)
    s, maths = extract_maths(s)
    #     print('maths = %s' % maths)
    for k, v in list(maths.items()):
        if v[0] == '$' and v[1] != '$$':
            if '\n\n' in v:
                msg = 'The Markdown pre-processor got confused by this math fragment:'
                msg += '\n\n' + indent(v, '  > ')
                res.note_error(msg, location)
                maths[k] = 'ERROR'

    s = latex_preprocessing(s)
    s = '<div style="display:none">Because of mathjax bug</div>\n\n\n' + s

    # cannot parse html before markdown, because md will take
    # invalid html, (in particular '$   ciao <ciao>' and make it work)

    s = s.replace('*}', '\*}')

    s, mcdpenvs = protect_my_envs(s)
    #     print('mcdpenvs = %s' % maths)

    s = col_macros_prepare_before_markdown(s)

    #     print(indent(s, 'before markdown | '))
    s = render_markdown(s)
    #     print(indent(s, 'after  markdown | '))

    for k, v in maths.items():
        if not k in s:
            msg = 'Internal error while dealing with Latex math.'
            msg += '\nCannot find %r (= %r)' % (k, v)
            res.note_error(msg, location)
            # raise_desc(DPInternalError, msg, s=s)
            continue

        def preprocess_equations(x):
            # this gets mathjax confused
            x = x.replace('>', '\\gt{}')  # need brace; think a<b -> a\lt{}b
            x = x.replace('<', '\\lt{}')
            #             print('replaced equation %r by %r ' % (x0, x))
            return x

        v = preprocess_equations(v)
        s = s.replace(k, v)

    s = replace_equations(s)
    s = s.replace('\\*}', '*}')

    # this parses the XML
    soup = bs(s)

    other_abbrevs(soup, res, location)

    substitute_special_paragraphs(soup, res, location)
    create_notes_from_elements(soup, res, location, unique)

    # need to process tabular before mathjax
    escape_for_mathjax(soup)

    #     print(indent(s, 'before prerender_mathjax | '))
    # mathjax must be after markdown because of code blocks using "$"

    s = to_html_stripping_fragment(soup)

    if use_mathjax:
        s = prerender_mathjax(s, symbols, res)

    soup = bs(s)
    escape_for_mathjax_back(soup)
    s = to_html_stripping_fragment(soup)

    #     print(indent(s, 'after prerender_mathjax | '))
    for k, v in mcdpenvs.items():
        # there is this case:
        # ~~~
        # <pre> </pre>
        # ~~~
        s = s.replace(k, v)

    s = s.replace('<p>DRAFT</p>', '<div class="draft">')

    s = s.replace('<p>/DRAFT</p>', '</div>')

    soup = bs(s)
    mark_console_pres(soup, res, location)

    # try:

    # except Exception as e:
    #     msg = 'I got an error while substituting github: references.'
    #     msg += '\nI will ignore this error because it might not be the fault of the writer.'
    #     msg += '\n\n' + indent(str(e), '|', ' error: |')
    #

    # must be before make_figure_from_figureid_attr()
    display_files(soup, defaults={}, res=res, location=location, raise_errors=raise_errors)

    make_figure_from_figureid_attr(soup, res, location)
    col_macros(soup)
    fix_subfig_references(soup)

    library = get_library_from_document(soup, default_library=library)

    from .highlight import html_interpret
    html_interpret(library, soup, generate_pdf=generate_pdf,
                   raise_errors=raise_errors, realpath=realpath, res=res, location=location)
    if filter_soup is not None:
        filter_soup(library=library, soup=soup)

    if False:
        embed_images_from_library2(soup=soup, library=library,
                                   raise_errors=raise_missing_image_errors,
                                   res=res, location=location)
    else:
        resolve_references_to_images(soup=soup, library=library,
                                     raise_errors=raise_missing_image_errors,
                                     res=res, location=location)

    make_videos(soup, res, location, raise_on_errors=False)

    if check_refs:
        check_if_any_href_is_invalid(soup, res, location, ignore_ref_errors=ignore_ref_errors)

    if False:
        if getuser() == 'andrea':
            if MCDPConstants.preprocess_style_using_less:
                run_lessc(soup)
            else:
                logger.warning(
                        'preprocess_style_using_less=False might break the manual')

    fix_validation_problems(soup)

    strip_pre(soup)

    if MCDPManualConstants.enable_syntax_higlighting:
        syntax_highlighting(soup)

    if MCDPManualConstants.enforce_lang_attribute:
        check_lang_codes(soup, res, location)

    # Fixes the IDs (adding 'sec:'); add IDs to missing ones
    globally_unique_id_part = 'autoid-DO-NOT-USE-THIS-VERY-UNSTABLE-LINK-' + get_md5(realpath)[:8]
    fix_ids_and_add_missing(soup, globally_unique_id_part, res, location)

    check_no_patently_wrong_links(soup, res, location)

    if MCDPManualConstants.enforce_status_attribute:
        check_status_codes(soup, realpath, res, location)

    s = to_html_stripping_fragment(soup)
    s = replace_macros(s)

    return s
Example #30
def write_errors_and_warnings_files(aug, d):
    if aug.has_result():
        id2filename = aug.get_result()
    else:
        id2filename = {}
    # print('id2filename: %s' % sorted(id2filename))
    assert isinstance(aug, AugmentedResult)
    aug.update_refs(id2filename)

    header = get_notes_panel(aug)

    manifest = []
    nwarnings = len(aug.get_notes_by_tag(MCDPManualConstants.NOTE_TAG_WARNING))
    fn = os.path.join(d, 'warnings.html')

    html = html_list_of_notes(aug,
                              MCDPManualConstants.NOTE_TAG_WARNING,
                              'warnings',
                              'warning',
                              header=header)
    # update_refs_('warnings', html, id2filename)

    write_data_to_file(str(html), fn, quiet=True)
    if nwarnings:
        manifest.append(
            dict(display='%d warnings' % nwarnings, filename='warnings.html'))
        msg = 'There were %d warnings: %s' % (nwarnings, fn)
        logger.warn(msg)

    ntasks = len(aug.get_notes_by_tag(MCDPManualConstants.NOTE_TAG_TASK))
    fn = os.path.join(d, 'tasks.html')

    html = html_list_of_notes(aug,
                              MCDPManualConstants.NOTE_TAG_TASK,
                              'tasks',
                              'task',
                              header=header)
    # update_refs_('tasks', html, id2filename)
    write_data_to_file(str(html), fn, quiet=True)
    if nwarnings:
        manifest.append(
            dict(display='%d tasks' % ntasks, filename='tasks.html'))
        msg = 'There are %d open tasks: %s' % (ntasks, fn)
        logger.info(msg)

    nerrors = len(aug.get_notes_by_tag(MCDPManualConstants.NOTE_TAG_ERROR))
    fn = os.path.join(d, 'errors.html')
    html = html_list_of_notes(aug,
                              MCDPManualConstants.NOTE_TAG_ERROR,
                              'errors',
                              'error',
                              header=header)
    # update_refs_('tasks', html, id2filename)
    write_data_to_file(str(html), fn, quiet=True)
    if nerrors:
        manifest.append(
            dict(display='%d errors' % nerrors, filename='errors.html'))

        msg = 'I am sorry to say that there were %d errors.\n\nPlease see: %s' % (
            nerrors, fn)
        logger.error('\n\n\n' + indent(msg, ' ' * 15) + '\n\n')

    fn = os.path.join(d, 'errors_and_warnings.manifest.yaml')
    write_data_to_file(yaml.dump(manifest), fn, quiet=False)

    fn = os.path.join(d, 'errors_and_warnings.pickle')
    res = AugmentedResult()
    res.merge(aug)
    write_data_to_file(pickle.dumps(res), fn, quiet=False)
Example #31
def check_if_any_href_is_invalid(soup, res, location0, extra_refs=None,
                                 ignore_ref_errors=False):
    """
        Checks if references are invalid and tries to correct them.

        also works the magic
    """

    if extra_refs is None:
        extra_refs = Tag(name='div')
    else:
        print('using extra cross refs')

    # let's first find all the IDs
    id2element_current, duplicates = get_id2element(soup, 'id')
    id2element_extra, _ = get_id2element(extra_refs, 'id')
    id2element = {}
    id2element.update(id2element_extra)
    id2element.update(id2element_current)

    for a in soup.select('[href^="#"]'):
        href = a['href']
        assert href.startswith('#')
        ID = href[1:]

        if a.has_attr('class') and "mjx-svg-href" in a['class']:
            msg = 'Invalid math reference (sorry, no details): href = %s .' % href
            location = HTMLIDLocation.for_element(a, location0)
            res.note_error(msg, location)
            continue

        if ID not in id2element:
            # try to fix it

            # if there is already a prefix, remove it
            if ':' in href:
                i = href.index(':')
                core = href[i + 1:]
            else:
                core = ID

            possible = MCDPManualConstants.all_possible_prefixes_that_can_be_implied

            matches = []
            others = []
            for possible_prefix in possible:
                why_not = possible_prefix + ':' + core
                others.append(why_not)
                if why_not in id2element:
                    matches.append(why_not)

            if len(matches) > 1:
                msg = '%s not found, and multiple matches for heuristics (%s)' % (href, matches)
                location = HTMLIDLocation.for_element(a, location0)
                res.note_error(msg, location)

            elif len(matches) == 1:

                # if 'base_url' in element.attrs:
                #     a['href'] = element.attrs['base_url'] + '#' + matches[0]
                # else:
                a.attrs['href'] = '#' + matches[0]

                if matches[0] not in id2element_current:
                    element = id2element[matches[0]]
                    # msg = 'Using foreign resolve for %s -> %s' % (matches[0], a['href'])
                    # logger.info(msg)
                    a.attrs['href_external'] = element.attrs['base_url'] + '#' + matches[0]

                if show_debug_message_for_corrected_links:
                    msg = '%s not found, but corrected in %s' % (href, matches[0])
                    location = HTMLIDLocation.for_element(a, location0)
                    res.note_warning(msg, location)

            else:
                if has_class(a, MCDPConstants.CLASS_IGNORE_IF_NOT_EXISTENT):
                    del a.attrs['href']
                    # logger.warning('ignoring link %s' % a)
                else:
                    msg = 'I do not know what is indicated by the link %r.' % href
                    marker = Tag(name='span')
                    marker.attrs['class'] = 'inside-unknown-link'
                    marker.append(' (unknown ref %s)' % core)
                    a.append(marker)
                    location = HTMLIDLocation.for_element(a, location0)
                    if ignore_ref_errors:
                        msg2 = 'I will ignore this error because this is the first pass:'
                        msg2 += '\n\n' + indent(msg, ' > ')
                        res.note_warning(msg2, location)
                    else:
                        res.note_error(msg, location)

        if ID in duplicates:
            msg = 'More than one element matching %r.' % href
            location = HTMLIDLocation.for_element(a, location0)
            res.note_error(msg, location)
Example #32
    def process(self):
        """ Start processing jobs. """
        # logger.info('Started job manager with %d jobs.' % (len(self.todo)))
        self.check_invariants()

        if not self.todo and not self.ready_todo:
            publish(self.context, 'manager-succeeded',
                    nothing_to_do=True,
                    targets=self.targets, done=self.done,
                    all_targets=self.all_targets,
                    todo=self.todo,
                    failed=self.failed,
                    blocked=self.blocked,
                    ready=self.ready_todo,
                    processing=self.processing)
            return True

        publish(self.context, 'manager-phase', phase='init')
        self.process_init()

        publish(self.context, 'manager-phase', phase='loop')
        try:
            i = 0
            while self.todo or self.ready_todo or self.processing:
                self.log(indent(self._get_situation_string(), '%s: ' % i))
                i += 1
                self.check_invariants()
                # either something ready to do, or something doing
                # otherwise, we are completely blocked
                if (not self.ready_todo) and (not self.processing):
                    msg = ('Nothing ready to do, and nothing cooking. '
                           'This probably means that the Compmake job '
                           'database was inconsistent. '
                           'This might happen if the job creation is '
                           'interrupted. Use the command "check-consistency" '
                           'to check the database consistency.\n'
                           + self._get_situation_string())
                    raise CompmakeBug(msg)

                self.publish_progress()
                waiting_on = self.instance_some_jobs()
                # self.publish_progress()

                publish(self.context, 'manager-wait', reasons=waiting_on)

                if self.ready_todo and not self.processing:
                    # We time out as there are no resources
                    publish(self.context, 'manager-phase', phase='wait')

                self.loop_until_something_finishes()
                self.check_invariants()
            self.log(indent(self._get_situation_string(), 'ending: '))

            # end while
            assert not self.todo
            assert not self.ready_todo
            assert not self.processing
            self.check_invariants()

            self.publish_progress()

            self.process_finished()

            publish(self.context, 'manager-succeeded',
                    nothing_to_do=False,
                    targets=self.targets, done=self.done,
                    all_targets=self.all_targets,
                    todo=self.todo, failed=self.failed, ready=self.ready_todo,
                    blocked=self.blocked,
                    processing=self.processing)

            return True

        except JobInterrupted as e:
            from ..ui import error

            error('Received JobInterrupted: %s' % e)
            raise
        except KeyboardInterrupt:
            raise KeyboardInterrupt('Manager interrupted.')
        finally:
            self.cleanup()
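The manager loop above is driven entirely by three job sets: `todo`, `ready_todo`, and `processing`. The loop only exits when all three are empty, and it raises when nothing is ready and nothing is running. A minimal, self-contained sketch of that invariant (toy names, not the Compmake API) follows.

# Toy sketch of the scheduling invariant used by process() above.
# All names here are illustrative; this is not Compmake code.
class ToyManager:
    def __init__(self, jobs):
        self.todo = set(jobs)        # known but not yet completed
        self.ready_todo = set(jobs)  # runnable now (no dependencies in this toy)
        self.processing = set()      # currently running

    def run(self):
        while self.todo or self.ready_todo or self.processing:
            if not self.ready_todo and not self.processing:
                # Mirrors the CompmakeBug raised above: the manager is stuck.
                raise RuntimeError('Nothing ready to do, and nothing cooking.')
            job = self.ready_todo.pop()
            self.todo.discard(job)
            self.processing.add(job)
            print('running %s' % job)
            self.processing.discard(job)  # pretend the job finished immediately
        assert not self.todo and not self.ready_todo and not self.processing

if __name__ == '__main__':
    ToyManager(['a', 'b', 'c']).run()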
Exemple #33
0
def get_cross_refs(src_dirs, permalink_prefix, extra_crossrefs, ignore=[]):
    res = AugmentedResult()
    files = look_for_files(src_dirs, "crossref.html")
    id2file = {}
    soup = Tag(name='div')

    def add_from_soup(s, f, ignore_alread_present, ignore_if_conflict):
        for img in list(s.find_all('img')):
            img.extract()

        for e in s.select('[base_url]'):
            e['external_crossref_file'] = f

        # Remove the ones with the same base_url
        for e in list(s.select('[base_url]')):
            if e.attrs['base_url'] == permalink_prefix:
                e.extract()

        for e in s.select('[id]'):
            id_ = e.attrs['id']
            if id_ == 'container': continue  # XXX:

            if id_ in id2file:
                if not ignore_alread_present:
                    msg = 'Found two elements with same ID "%s":' % id_
                    msg += '\n %s' % id2file[id_]
                    msg += '\n %s' % f
                    res.note_error(msg)
            else:
                id2file[id_] = f
                e2 = e.__copy__()
                if ignore_if_conflict:
                    e2.attrs['ignore_if_conflict'] = '1'
                soup.append(e2)
                soup.append('\n')

    ignore = [os.path.realpath(_) for _ in ignore]
    for _f in files:
        if os.path.realpath(_f) in ignore:
            msg = 'Ignoring file %r' % _f
            logger.info(msg)
            continue
        logger.info('cross ref file %s' % _f)
        data = open(_f).read()
        if permalink_prefix in data:
            msg = 'skipping own file'
            logger.debug(msg)
            continue
        s = bs(data)
        add_from_soup(s,
                      _f,
                      ignore_alread_present=False,
                      ignore_if_conflict=False)

    if extra_crossrefs is not None:
        logger.info('Reading external refs\n%s' % extra_crossrefs)
        try:
            r = requests.get(extra_crossrefs)
        except Exception as ex:
            msg = 'Could not read external cross reference links'
            msg += '\n  %s' % extra_crossrefs
            msg += '\n\n' + indent(str(ex), ' > ')
            res.note_error(msg)
        else:
            logger.debug('%s %s' % (r.status_code, extra_crossrefs))
            if r.status_code == 404:
                msg = 'Could not read external cross refs: %s' % r.status_code
                msg += '\n url: ' + extra_crossrefs
                msg += '\n This is normal if you have not pushed this branch yet.'
                res.note_warning(msg)
                # logger.error(msg)
            s = bs(r.text)
            add_from_soup(s,
                          extra_crossrefs,
                          ignore_alread_present=True,
                          ignore_if_conflict=True)

    # print soup
    res.set_result(str(soup))
    return res
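The core of `get_cross_refs` is merging elements by `id` across several HTML fragments, keeping the first occurrence of each id and reporting duplicates. A stripped-down sketch of that idea using plain BeautifulSoup (the `bs`/`Tag` helpers above are project wrappers; this sketch assumes only `bs4` is installed):

# Illustrative sketch: merge elements by id across fragments, keeping the
# first definition of each id, as add_from_soup does above.
from bs4 import BeautifulSoup

def merge_by_id(fragments):
    id2file = {}
    merged = BeautifulSoup('<div></div>', 'html.parser').div
    for fname, html in fragments:
        s = BeautifulSoup(html, 'html.parser')
        for e in s.select('[id]'):
            id_ = e.attrs['id']
            if id_ in id2file:
                print('duplicate id %r in %s and %s' % (id_, id2file[id_], fname))
            else:
                id2file[id_] = fname
                merged.append(e.__copy__())
    return str(merged)

print(merge_by_id([('a.html', '<p id="x">one</p>'),
                   ('b.html', '<p id="x">two</p><p id="y">three</p>')]))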
Exemple #34
0
def pmake_worker(name, job_queue, result_queue, signal_queue, signal_token,
                 write_log=None):
    if write_log:
        f = open(write_log, 'w')

        def log(s):
            #print('%s: %s' % (name, s))
            f.write('%s: ' % name)
            f.write(s)
            f.write('\n')
            f.flush()
    else:
        def log(s):
            print('%s: %s' % (name, s))

    log('started pmake_worker()')
    signal.signal(signal.SIGINT, signal.SIG_IGN)

    def put_result(x):
        log('putting result in result_queue..')
        result_queue.put(x, block=True)
        if signal_queue is not None:
            log('putting result in signal_queue..')
            signal_queue.put(signal_token, block=True)
        log('(done)')

    try:
        while True:
            log('Listening for job')
            try:
                job = job_queue.get(block=True, timeout=5)
            except Empty:
                log('Could not receive anything.')
                continue
            if job == PmakeSub.EXIT_TOKEN:
                log('Received EXIT_TOKEN.')
                break

            log('got job: %s' % str(job))
            function, arguments = job
            try:
                result = function(arguments)
            except JobFailed as e:
                log('Job failed, putting notice.')
                log('result: %s' % str(e))  # debug
                put_result(e.get_result_dict())
            except JobInterrupted as e:
                log('Job interrupted, putting notice.')
                put_result(dict(abort=str(e)))  # XXX
            except CompmakeBug as e:  # XXX :to finish
                log('CompmakeBug')
                put_result(e.get_result_dict())
            else:
                log('result: %s' % str(result))
                put_result(result)

            log('...done.')

            # except KeyboardInterrupt: pass
    except BaseException as e:
        reason = 'aborted because of uncaptured:\n' + indent(
                traceback.format_exc(), '| ')
        mye = HostFailed(host="???", job_id="???",
                         reason=reason, bt=traceback.format_exc())
        log(str(mye))
        put_result(mye.get_result_dict())
    except:
        mye = HostFailed(host="???", job_id="???",
                         reason='Unknown exception (not BaseException)',
                         bt="not available")
        log(str(mye))
        put_result(mye.get_result_dict())
        log('(put)')


    if signal_queue is not None:
        signal_queue.close()
    result_queue.close()
    log('clean exit.')
Exemple #35
0
def job_interrupted(context, event):  # @UnusedVariable
    error(my_prefix + 'Job %r interrupted:\n %s' %
          (event.kwargs['job_id'],
           indent(event.kwargs['bt'], '> ')))
Exemple #36
0
def manager_host_failed(context, event):  # @UnusedVariable
    s = 'Host failed for job %s: %s' % (event.job_id, event.reason)
    s += indent(event.bt.strip(), '| ')
    error(s)
Exemple #37
0
def manager_host_failed(context, event):  # @UnusedVariable
    s = 'Host failed for job %s: %s' % (event.job_id, event.reason)
    s += indent(event.bt.strip(), '| ')
    error(s)
Exemple #38
0
def pmake_worker(name, job_queue, result_queue, signal_queue, signal_token, write_log=None):
    if write_log:
        f = open(write_log, "w")

        def log(s):
            f.write("%s: " % name)
            f.write(s)
            f.write("\n")
            f.flush()

    else:

        def log(s):
            pass

    log("started pmake_worker()")
    signal.signal(signal.SIGINT, signal.SIG_IGN)

    def put_result(x):
        log("putting result in result_queue..")
        result_queue.put(x, block=True)
        if signal_queue is not None:
            log("putting result in signal_queue..")
            signal_queue.put(signal_token, block=True)
        log("(done)")

    try:
        while True:
            log("Listening for job")
            job = job_queue.get(block=True)
            log("got job: %s" % str(job))
            if job == PmakeSub.EXIT_TOKEN:
                break
            function, arguments = job
            try:
                result = function(arguments)
            except JobFailed as e:
                log("Job failed, putting notice.")
                log("result: %s" % str(e))  # debug
                put_result(e.get_result_dict())
            except JobInterrupted as e:
                log("Job interrupted, putting notice.")
                put_result(dict(abort=str(e)))  # XXX
            except CompmakeBug as e:  # XXX :to finish
                log("CompmakeBug")
                put_result(e.get_result_dict())
            else:
                log("result: %s" % str(result))
                put_result(result)

            log("...done.")

            # except KeyboardInterrupt: pass
    except BaseException as e:
        reason = "aborted because of uncaptured:\n" + indent(traceback.format_exc(e), "| ")
        mye = HostFailed(host="???", job_id="???", reason=reason, bt=traceback.format_exc(e))
        log(str(mye))
        put_result(mye.get_result_dict())
    except:
        mye = HostFailed(host="???", job_id="???", reason="Unknown exception (not BaseException)", bt="not available")
        log(str(mye))
        put_result(mye.get_result_dict())
        log("(put)")

    if signal_queue is not None:
        signal_queue.close()
    result_queue.close()
    log("clean exit.")
Exemple #39
0
def job_interrupted(context, event):  # @UnusedVariable
    error(my_prefix + 'Job %r interrupted:\n %s' %
          (event.kwargs['job_id'], indent(event.kwargs['bt'], '> ')))
Exemple #40
0
    def process(self):
        """ Start processing jobs. """
        # logger.info('Started job manager with %d jobs.' % (len(self.todo)))
        self.check_invariants()

        if not self.todo and not self.ready_todo:
            publish(self.context,
                    'manager-succeeded',
                    nothing_to_do=True,
                    targets=self.targets,
                    done=self.done,
                    all_targets=self.all_targets,
                    todo=self.todo,
                    failed=self.failed,
                    blocked=self.blocked,
                    ready=self.ready_todo,
                    processing=self.processing)
            return True

        publish(self.context, 'manager-phase', phase='init')
        self.process_init()

        publish(self.context, 'manager-phase', phase='loop')
        try:
            i = 0
            while self.todo or self.ready_todo or self.processing:
                self.log(indent(self._get_situation_string(), '%s: ' % i))
                i += 1
                self.check_invariants()
                # either something ready to do, or something doing
                # otherwise, we are completely blocked
                if (not self.ready_todo) and (not self.processing):
                    msg = ('Nothing ready to do, and nothing cooking. '
                           'This probably means that the Compmake job '
                           'database was inconsistent. '
                           'This might happen if the job creation is '
                           'interrupted. Use the command "check-consistency" '
                           'to check the database consistency.\n' +
                           self._get_situation_string())
                    raise CompmakeBug(msg)

                self.publish_progress()
                waiting_on = self.instance_some_jobs()
                # self.publish_progress()

                publish(self.context, 'manager-wait', reasons=waiting_on)

                if self.ready_todo and not self.processing:
                    # We time out as there are no resources
                    publish(self.context, 'manager-phase', phase='wait')

                self.loop_until_something_finishes()
                self.check_invariants()
            self.log(indent(self._get_situation_string(), 'ending: '))

            # end while
            assert not self.todo
            assert not self.ready_todo
            assert not self.processing
            self.check_invariants()

            self.publish_progress()

            self.process_finished()

            publish(self.context,
                    'manager-succeeded',
                    nothing_to_do=False,
                    targets=self.targets,
                    done=self.done,
                    all_targets=self.all_targets,
                    todo=self.todo,
                    failed=self.failed,
                    ready=self.ready_todo,
                    blocked=self.blocked,
                    processing=self.processing)

            return True

        except JobInterrupted as e:
            from ..ui import error

            error('Received JobInterrupted: %s' % e)
            raise
        except KeyboardInterrupt:
            raise KeyboardInterrupt('Manager interrupted.')
        finally:
            self.cleanup()
Exemple #41
0
def pmake_worker(name,
                 job_queue,
                 result_queue,
                 signal_queue,
                 signal_token,
                 write_log=None):
    if write_log:
        f = open(write_log, 'w')

        def log(s):
            #print('%s: %s' % (name, s))
            f.write('%s: ' % name)
            f.write(s)
            f.write('\n')
            f.flush()
    else:

        def log(s):
            print('%s: %s' % (name, s))

    log('started pmake_worker()')
    signal.signal(signal.SIGINT, signal.SIG_IGN)

    def put_result(x):
        log('putting result in result_queue..')
        result_queue.put(x, block=True)
        if signal_queue is not None:
            log('putting result in signal_queue..')
            signal_queue.put(signal_token, block=True)
        log('(done)')

    try:
        while True:
            log('Listening for job')
            try:
                job = job_queue.get(block=True, timeout=5)
            except Empty:
                log('Could not receive anything.')
                continue
            if job == PmakeSub.EXIT_TOKEN:
                log('Received EXIT_TOKEN.')
                break

            log('got job: %s' % str(job))
            function, arguments = job
            try:
                result = function(arguments)
            except JobFailed as e:
                log('Job failed, putting notice.')
                log('result: %s' % str(e))  # debug
                put_result(e.get_result_dict())
            except JobInterrupted as e:
                log('Job interrupted, putting notice.')
                put_result(dict(abort=str(e)))  # XXX
            except CompmakeBug as e:  # XXX :to finish
                log('CompmakeBug')
                put_result(e.get_result_dict())
            else:
                log('result: %s' % str(result))
                put_result(result)

            log('...done.')

            # except KeyboardInterrupt: pass
    except BaseException as e:
        reason = 'aborted because of uncaptured:\n' + indent(
            traceback.format_exc(), '| ')
        mye = HostFailed(host="???",
                         job_id="???",
                         reason=reason,
                         bt=traceback.format_exc())
        log(str(mye))
        put_result(mye.get_result_dict())
    except:
        mye = HostFailed(host="???",
                         job_id="???",
                         reason='Unknown exception (not BaseException)',
                         bt="not available")
        log(str(mye))
        put_result(mye.get_result_dict())
        log('(put)')

    if signal_queue is not None:
        signal_queue.close()
    result_queue.close()
    log('clean exit.')
Exemple #42
0
def parmake_job2_new_process(args):
    """ Starts the job in a new compmake process. """
    (job_id, context) = args
    compmake_bin = which('compmake')

    db = context.get_compmake_db()
    storage = db.basepath  # XXX:
    where = os.path.join(storage, 'parmake_job2_new_process')
    if not os.path.exists(storage):
        try:
            os.makedirs(storage)
        except:
            pass

    out_result = os.path.join(where, '%s.results.pickle' % job_id)
    out_result = os.path.abspath(out_result)
    cmd = [compmake_bin, storage]

    if not all_disabled():
        cmd += ['--contracts']

    cmd += [
        '--status_line_enabled',
        '0',
        '--colorize',
        '0',
        '-c',
        'make_single out_result=%s %s' % (out_result, job_id),
    ]

    cwd = os.getcwd()
    cmd_res = system_cmd_result(cwd,
                                cmd,
                                display_stdout=False,
                                display_stderr=False,
                                raise_on_error=False,
                                capture_keyboard_interrupt=False)
    ret = cmd_res.ret

    if ret == CompmakeConstants.RET_CODE_JOB_FAILED:  # XXX:
        msg = 'Job %r failed in external process' % job_id
        msg += indent(cmd_res.stdout, 'stdout| ')
        msg += indent(cmd_res.stderr, 'stderr| ')

        res = safe_pickle_load(out_result)
        os.unlink(out_result)
        result_dict_check(res)

        raise JobFailed.from_dict(res)

    elif ret != 0:
        msg = 'Host failed while doing %r' % job_id
        msg += '\n cmd: %s' % " ".join(cmd)
        msg += '\n' + indent(cmd_res.stdout, 'stdout| ')
        msg += '\n' + indent(cmd_res.stderr, 'stderr| ')
        raise CompmakeBug(msg)  # XXX:

    res = safe_pickle_load(out_result)
    os.unlink(out_result)
    result_dict_check(res)
    return res
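`parmake_job2_new_process` hands the job to a fresh `compmake` process and gets the outcome back through a pickle file whose path is passed on the command line. The round-trip pattern itself is simple; here is a self-contained sketch (hypothetical child script, not the actual compmake invocation):

# Illustrative result-file round trip: run a child process that writes a pickle,
# then load and validate it in the parent. Not the real compmake command line.
import os
import pickle
import subprocess
import sys
import tempfile

child_code = (
    "import pickle, sys\n"
    "with open(sys.argv[1], 'wb') as f:\n"
    "    pickle.dump({'fail': None, 'user_object': 42}, f)\n"
)

out_result = os.path.join(tempfile.mkdtemp(), 'job.results.pickle')
subprocess.check_call([sys.executable, '-c', child_code, out_result])

with open(out_result, 'rb') as f:
    res = pickle.load(f)
os.unlink(out_result)
assert isinstance(res, dict)   # stands in for result_dict_check(res)
print(res)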