Example #1
    def applies(self, dp1, dp2):
        # first must be Mux
        if not isinstance(dp1, Mux):
            return False

        # second must be Loop
        from mcdp_dp.dp_loop2 import DPLoop2
        if not isinstance(dp2, DPLoop2):
            return False

        # the first one inside Loop must be Mux, otherwise it 
        # doesn't simplify
        dp1s = unwrap_series(dp2.dp1)
        if not isinstance(dp1s[0], Mux):
            return False
        
        if dp1.coords != [()]:
            msg = ('Could not implement simplification'
                   ' for dp1.coords = {}').format(dp1.coords)
            logger.debug(msg)
            return False
        
        return True
Example #2
File: drawing.py Project: AndreaCensi/mcdp
def plot_upset_R2(pylab, us, axis, color_shadow,
                  extra_space_shadow=0.05, color_lines='none', markers='r.',
                  marker_params={}):

    points = us.minimals

    xmin, xmax, ymin, ymax = axis
    for p in points:
        if xmin <= p[0] <= xmax and (ymin <= p[1] <= ymax):
            mcdp_dev_warning('This should be smarter')

            # enlarge the axis slightly so cones on the boundary stay visible
            if p[0] == xmax:
                axis = enlarge_x(axis, extra_space_shadow)
            if p[1] == ymax:
                axis = enlarge_y(axis, extra_space_shadow)

            plot_cone(pylab, p, axis, color_shadow=color_shadow,
                      color_lines=color_lines)
        else:
            logger.debug('Warning: point %s not in axis %s' % (p, axis))
            
    # cuteness
    if markers is not None:
        for p in points:
            # This avoids the underflow seen when using "finfo.tiny"
            eps = finfo.eps
            p = np.maximum(p, eps)
            #print('plot_upset_R2: marker params: %s ' % marker_params)
            pylab.plot(p[0], p[1], markers, clip_on=False, **marker_params)
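
A minimal usage sketch for plot_upset_R2 (hypothetical data; it assumes drawing.py and its module-level helpers such as finfo, plot_cone, enlarge_x and enlarge_y are importable, and treats the upper set as anything with a .minimals iterable of 2D points):

    from matplotlib import pylab

    class FakeUpperSet(object):
        # stand-in for an mcdp UpperSet; only .minimals is used here
        minimals = [(1.0, 2.0), (2.0, 1.0)]

    plot_upset_R2(pylab, FakeUpperSet(), axis=(0.0, 3.0, 0.0, 3.0),
                  color_shadow=[1, 0.5, 0.5], markers='r.')
    pylab.show()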
Example #3
File: solve.py Project: AndreaCensi/mcdp
    def go(self):
        
        logger.setLevel(logging.DEBUG)

        options = self.get_options()
        
        if not options.contracts:
            logger.debug('Disabling PyContracts. Use --contracts to enable.')
            disable_all()
            
        if options.expect_nimp is not None:
            options.imp = True

        params = options.get_extra()

        if len(params) < 1:
            raise ValueError('Please specify model name.')

        model_name = params[0]

        # the model name must be given without the extension or a path
        if '.mcdp' in model_name or '/' in model_name:
            msg = 'The model name should not contain extension or /.'
            raise UserError(msg)

        max_steps = options.max_steps

        _exp_advanced = options.advanced
        expect_nres = options.expect_nres
        lower = options.lower
        upper = options.upper
        out_dir = options.out
        query_strings = params[1:]

        intervals = options.intervals
        imp = options.imp
        expect_nimp = options.expect_nimp
        make = options.make
        if make: imp = True

        plot = options.plot
        do_movie = options.movie

        expect_res = None

        config_dirs = options.config_dirs.split(":")
        maindir = options.maindir
        if options.cache:
            if out_dir is None:
                out_dir = 'out-mcdp_solve'
            cache_dir = os.path.join(out_dir, '_cached', 'solve')
        else:
            cache_dir = None

        solve_main(logger, config_dirs, maindir, cache_dir, model_name, lower, upper, out_dir, max_steps, query_strings,
                   intervals, _exp_advanced, expect_nres, imp, expect_nimp, plot, do_movie,
                   expect_res,
                   make)
Example #4
File: library.py Project: AndreaCensi/mcdp
        def actual_load():
            # maybe we should clone
            l = self.clone()
            logger.debug("Parsing %r" % (name))
            context_mine = Context()
            res = parsing_function(l, data, realpath, context=context_mine)

            setattr(res, ATTR_LOAD_NAME, name)
            return dict(res=res, context_warnings=context_mine.warnings, generation=current_generation)
Example #5
File: tests.py Project: AndreaCensi/mcdp
def timeit(desc, minimum=None):
    t0 = time.clock()
    yield
    t1 = time.clock()
    delta = t1 - t0
    if minimum is not None:
        if delta < minimum:
            return
    logger.debug('timeit %s: %.2f s (>= %s)' % (desc, delta, minimum))
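
timeit is a generator, so in the original tests.py it is presumably wrapped with contextlib.contextmanager (the decorator is not visible in this snippet). A hedged usage sketch under that assumption, with a hypothetical workload:

    from contextlib import contextmanager

    timeit_cm = contextmanager(timeit)

    with timeit_cm('parse model', minimum=0.5):
        run_expensive_step()  # hypothetical; logged only if it takes at least 0.5 s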
Example #6
def cndp_abstract(ndp):
    from .connection import get_connection_multigraph
    
    G = get_connection_multigraph(ndp.get_connections())
    cycles = list(simple_cycles(G))
    if len(cycles) > 0:
        logger.debug('cndp_abstract: %d cycles' % len(cycles))
    if not cycles:
        return dpgraph_making_sure_no_reps(ndp.context)
    else:
        return cndp_abstract_loop2(ndp)
Example #7
File: library.py Project: AndreaCensi/mcdp
    def use_cache_dir(self, cache_dir):
        try:
            if not os.path.exists(cache_dir):
                os.makedirs(cache_dir)
            fn = os.path.join(cache_dir, "touch")
            if os.path.exists(fn):
                os.unlink(fn)
            with open(fn, "w") as f:
                f.write("touch")
            os.unlink(fn)
        except Exception:
            logger.debug("Cannot write to folder %r. Not using caches." % cache_dir)
            self.cache_dir = None
        else:
            self.cache_dir = cache_dir
Example #8
def parse_ndp_filename(filename, context=None):
    """ Reads the file and returns as NamedDP.
        The exception are annotated with filename. """
    with open(filename) as f:
        contents = f.read()
    try:
        return parse_ndp(contents, context)
    except MCDPExceptionWithWhere as e:
        active = True
        if active:
            # http://stackoverflow.com/questions/1350671/inner-exception-with-traceback-in-python
            e = e.with_filename(filename)
            raise type(e), e.args, sys.exc_info()[2]
        else: # pragma: no cover
            logger.debug('Deactivated trace in parse_ndp_filename().')
            raise
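
A hedged usage sketch (the filename is hypothetical; it assumes the mcdp parsing machinery is importable):

    ndp = parse_ndp_filename('models/battery.mcdp')
    print(ndp)  # a NamedDP; any parse error is re-raised annotated with the filename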
Example #9
def add_where_information(where):
    """ Adds where field to DPSyntaxError or DPSemanticError thrown by code. """
    active = True
    if not active:
        logger.debug('Note: Error tracing disabled in add_where_information().')
        mcdp_dev_warning('add_where_information is disabled')
        yield
    else:
        try:
            yield
        except MCDPExceptionWithWhere as e:
            mcdp_dev_warning('add magic traceback handling here')
            _, _, tb = sys.exc_info()
            raise_with_info(e, where, tb)
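
Like timeit above, add_where_information is a generator, presumably used through contextlib.contextmanager in the original module. A hedged sketch of the intended pattern (the evaluation call and the where object are hypothetical):

    from contextlib import contextmanager

    with contextmanager(add_where_information)(where):
        evaluate_expression()
    # any MCDPExceptionWithWhere raised inside the block is re-raised
    # with `where` attached via raise_with_info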
Example #10
File: tests.py Project: AndreaCensi/mcdp
def mcdplib_run_make(mcdplib):
    makefile = os.path.join(mcdplib, 'Makefile')
    assert os.path.exists(makefile)
    cwd = mcdplib
    cmd = ['make', 'clean', 'all']
    from system_cmd.meat import system_cmd_result
    logger.debug('$ cd %s' % cwd)
    env = os.environ.copy()
    if all_disabled():
        env['DISABLE_CONTRACTS'] = '1'
        msg = ('Disabling contracts in environment by adding '
               'DISABLE_CONTRACTS=%r.' % env['DISABLE_CONTRACTS'])
        logger.debug(msg)
        
    system_cmd_result(cwd, cmd,
                      display_stdout=True,
                      display_stderr=True,
                      raise_on_error=True,
                      env=env)
Example #11
    def get_edges_to_consider():
        # For each set of cycles, find which edges are in the equivalence class
        from collections import defaultdict

        cycles2edges = defaultdict(lambda: set())
        for e in edges_belonging_to_cycles:
            cycles = freeze(cycles_for_edge(e))
            cycles2edges[cycles].add(e)

        cycles2champion = {}
        cycles2weight = {}
        for cycles, edges in cycles2edges.items():
            logger.debug("Found %s edges that remove a set of %s cycles" % (len(edges), len(cycles)))

            best = min(edges, key=edge_weight)

            cycles2champion[cycles] = best
            cycles2weight[cycles] = edge_weight(best)

        def a_contains_b(ca, cb):
            return cb.issubset(ca)

        consider = set()
        for cycles1 in cycles2weight:
            # logger.debug('cycles')
            for cycles2 in cycles2weight:
                w1 = cycles2weight[cycles1]
                w2 = cycles2weight[cycles2]
                if a_contains_b(cycles2, cycles1) and w2 < w1:
                    # logger.debug('dominated')
                    break
            else:
                # not dominated
                consider.add(cycles2champion[cycles1])

        logger.debug("From %d to %d edges to consider" % (len(edges_belonging_to_cycles), len(consider)))
        return consider
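
A toy illustration of the dominance filter above (cycle names and weights are made up): a champion whose cycle set is contained in another champion's cycle set, at a higher weight, is discarded.

    cycles2weight = {
        frozenset(['c1']): 3.0,        # this champion breaks only cycle c1, at weight 3
        frozenset(['c1', 'c2']): 2.0,  # this champion breaks c1 and c2, at weight 2
    }
    # The first entry is dominated: the second champion breaks a superset of
    # its cycles at a lower weight, so only the second one is kept in `consider`.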
Example #12
File: library.py Project: AndreaCensi/mcdp
    def _load_generic(self, name, extension, parsing_function, context):
        """
            parsing_function takes string, context 
        """
        if context is None:
            context = Context()
        if not isinstance(name, str):
            msg = "Expected a string for the name."
            raise_desc(ValueError, msg, name=name)
        filename = "%s.%s" % (name, extension)
        f = self._get_file_data(filename)
        data = f["data"]
        realpath = f["realpath"]

        current_generation = 3

        def actual_load():
            # maybe we should clone
            l = self.clone()
            logger.debug("Parsing %r" % (name))
            context_mine = Context()
            res = parsing_function(l, data, realpath, context=context_mine)

            setattr(res, ATTR_LOAD_NAME, name)
            return dict(res=res, context_warnings=context_mine.warnings, generation=current_generation)

        if not self.cache_dir:
            res_data = actual_load()
            cached = False
        else:
            cache_file = os.path.join(self.cache_dir, parsing_function.__name__, "%s.cached" % name)

            res_data = memo_disk_cache2(cache_file, data, actual_load)
            cached = True

            if (
                not isinstance(res_data, dict)
                or "generation" not in res_data
                or res_data["generation"] < current_generation
            ):  # outdated cache
                logger.debug("Removing stale cache %r." % cache_file)
                res_data = actual_load()
                try:
                    os.unlink(cache_file)
                except Exception:
                    pass
                cached = False

        res = res_data["res"]
        context_warnings = res_data["context_warnings"]

        cached = "[Cached]" if cached else ""
        logger.debug("actual_load(): parsed %r with %d warnings %s" % (name, len(context_warnings), cached))

        class JustAHack:
            warnings = context_warnings

        msg = "While loading %r." % name
        from mcdp_lang.eval_warnings import warnings_copy_from_child_make_nested

        warnings_copy_from_child_make_nested(context, JustAHack, msg=msg, where=None)

        return res
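
A toy illustration of the generation check above, which treats cache entries written by an older version of the loader as stale (the values are made up):

    current_generation = 3
    res_data = {'res': object(), 'context_warnings': [], 'generation': 2}
    stale = (not isinstance(res_data, dict)
             or 'generation' not in res_data
             or res_data['generation'] < current_generation)
    assert stale  # an outdated cache file is removed and the source re-parsed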
Example #13
def _report_loop_sequence(report, R, sips, converged, do_movie):
    """
        Returns a dictionary dict(str: list of png data)
    """
    sequences = {}
    
    UR = UpperSets(R)
    from matplotlib import pylab
    ieee_fonts(pylab)
    RepRepDefaults.savefig_params = dict(dpi=400, bbox_inches='tight', 
                                         pad_inches=0.01, transparent=False)

    figsize = (2, 2)
    
    try:
        available_plotters = list(get_plotters(get_all_available_plotters(), UR))
    except NotPlottable as e:
        msg = 'Could not find plotter for space UR = %s.' % UR
        raise_wrapped(DPInternalError, e, msg, UR=UR, compact=True)
    
    with report.subsection('sip') as r2:
        for name, plotter in available_plotters:
            sequences[name] = [] # sequence of png
            f = r2.figure(name, cols=5)

            axis = plotter.axis_for_sequence(UR, sips)

            axis = list(axis)
            axis[0] = 0.0
            axis[2] = 0.0
            axis[1] = min(axis[1], 1000.0)
            axis[3] = min(axis[3], 1000.0)
            axis = tuple(axis)

            visualized_axis = enlarge(axis, extra_space_top * 2)

            for i, sip in enumerate(sips):
                with f.plot('step%03d' % i, figsize=figsize) as pylab:
                    logger.debug('Plotting iteration %d/%d' % (i, len(sips)))
                    ieee_spines(pylab)
                    c_orange = '#FFA500'
                    c_red = [1, 0.5, 0.5]
                    plotter.plot(pylab, axis, UR, R.U(R.get_bottom()),
                                 params=dict(color_shadow=c_red, markers=None))
                    marker_params = dict(markersize=5, markeredgecolor='none')
                    plotter.plot(pylab, axis, UR, sip,
                                 params=dict(color_shadow=c_orange,
                                             markers_params=marker_params))
                    conv = converged[i]
                    c_blue = [0.6, 0.6, 1.0]
                    plotter.plot(pylab, axis, UR, conv,
                                 params=dict(color_shadow=c_blue))
                    
                    for c in conv.minimals:
                        p = plotter.toR2(c)
                        pylab.plot(p[0], p[1], 'go',
                                   markersize=5, markeredgecolor='none', 
                                   markerfacecolor='g', clip_on=False)
                    pylab.axis(visualized_axis)
                    from mcdp_ipython_utils.plotting import color_resources, set_axis_colors

                    set_axis_colors(pylab, color_resources, color_resources)

                if do_movie:
                    node = f.resolve_url('step%03d/png' % i)
                    png = node.raw_data
                    sequences[name].append(png)

    return sequences
Example #14
def compact_context(context):
    """
        If there are two subs with multiple connections,
        we take the product of their wires.
    
    """
    from .context_functions import find_nodes_with_multiple_connections
    from mcdp_dp import Mux
    from mocdp.comp.wrap import dpwrap
    from mocdp.comp.connection import connect2

    s = find_nodes_with_multiple_connections(context)
    if not s:
        return context
    else:
        name1, name2, their_connections = s[0]
        logger.debug('Will compact %s, %s, %s' % s[0])

        # establish order
        their_connections = list(their_connections)
        s1s = [c.s1 for c in their_connections]
        s2s = [c.s2 for c in their_connections]
 
        # print 'compacting', their_connections
        ndp1 = context.names[name1]
        ndp2 = context.names[name2]
        sname = '_'.join(sorted(s1s))
        
        #  space -- [mux] -- R -- [demux]
        space = ndp1.get_rtypes(s1s)

        N = len(their_connections)
        mux = Mux(space, [list(range(N))])
        muxndp = dpwrap(mux, s1s, sname)

        R = mux.get_res_space()

        coords = [(0, i) for i in range(N)]
        demux = Mux(R, coords)
        R2 = demux.get_res_space()
        assert space == R2, (space, R2)

        # example: R = PosetProduct((PosetProduct((A, B, C)),))
        #
        demuxndp = dpwrap(demux, sname, s2s)


        replace1 = connect2(ndp1, muxndp,
                            connections=set([Connection('*', s, '*', s) for s in s1s]),
                            split=[], repeated_ok=False)

        replace2 = connect2(demuxndp, ndp2,
                            connections=set([Connection('*', s, '*', s) for s in s2s]),
                            split=[], repeated_ok=False)

        context.names[name1] = replace1
        context.names[name2] = replace2

        context.connections = [x for x in context.connections
                               if x not in their_connections]

        c = Connection(name1, sname, name2, sname)
        context.connections.append(c)
        return compact_context(context)
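
A conceptual illustration of the Mux/demux coordinates used above, with plain tuples standing in for mcdp posets (purely illustrative):

    N = 3
    wires = ('a', 'b', 'c')
    # Mux(space, [list(range(N))]) packs the N wires into a single product:
    packed = (tuple(wires[i] for i in range(N)),)      # (('a', 'b', 'c'),)
    # Mux(R, [(0, i) for i in range(N)]) unpacks them again:
    unpacked = tuple(packed[0][i] for i in range(N))   # ('a', 'b', 'c')
    assert unpacked == wires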
Example #15
def enumerate_minimal_solution(G, edge_weight):
    """
        G: a graph
        edge_weight: a map from edge (i,j) of G to nonnegative weight
    """
    # Next optimization: consider equivalence classes of edges
    # (edges belonging to the same set of cycles), keeping only the cheapest one in each class.
    from mocdp.comp.connection import simple_cycles_as_edges

    State = namedtuple("State", "cycles weight")
    # set of edges removed -> state
    current_solutions = {}
    current_partial_solutions = {}
    examined = set()

    freeze = frozenset

    # initial states
    all_edges = set(G.edges())

    all_cycles = simple_cycles_as_edges(G)

    def belongs_to_cycles(e):
        for c in all_cycles:
            assert isinstance(c, tuple)
            if e in c:
                return True
        return False

    def cycles_for_edge(e):
        cycles = set()
        for c in all_cycles:
            assert isinstance(c, tuple)
            if e in c:
                cycles.add(c)
        return freeze(cycles)

    # these are the ones we care about
    edges_belonging_to_cycles = set([e for e in all_edges if belongs_to_cycles(e)])

    def get_edges_to_consider():
        # For each set of cycles, find which edges are in the equivalence class
        from collections import defaultdict

        cycles2edges = defaultdict(lambda: set())
        for e in edges_belonging_to_cycles:
            cycles = freeze(cycles_for_edge(e))
            cycles2edges[cycles].add(e)

        cycles2champion = {}
        cycles2weight = {}
        for cycles, edges in cycles2edges.items():
            logger.debug("Found %s edges that remove a set of %s cycles" % (len(edges), len(cycles)))

            best = min(edges, key=edge_weight)

            cycles2champion[cycles] = best
            cycles2weight[cycles] = edge_weight(best)

        def a_contains_b(ca, cb):
            return cb.issubset(ca)

        consider = set()
        for cycles1 in cycles2weight:
            # logger.debug('cycles')
            for cycles2 in cycles2weight:
                w1 = cycles2weight[cycles1]
                w2 = cycles2weight[cycles2]
                if a_contains_b(cycles2, cycles1) and w2 < w1:
                    # logger.debug('dominated')
                    break
            else:
                # not dominated
                consider.add(cycles2champion[cycles1])

        logger.debug("From %d to %d edges to consider" % (len(edges_belonging_to_cycles), len(consider)))
        return consider

    edges_to_consider = get_edges_to_consider()

    logger.debug("Deciding between %s hot of %d edges" % (len(edges_to_consider), len(all_edges)))

    best_weight = np.inf

    current_partial_solutions[freeze([])] = State(cycles=all_cycles, weight=0.0)

    while current_partial_solutions:
        # choose the solution to expand with minimum weight
        removed, state = pop_solution_minimum_weight(current_partial_solutions)
        examined.add(removed)
        logger.debug(
            "nsolutions %s best w %s / current_partial_solutions %s / removed %s"
            % (len(current_solutions), best_weight, len(current_partial_solutions), removed)
        )

        # now look at edges that we could remove
        to_remove = edges_to_consider - removed

        for edge in to_remove:
            new_weight = state.weight + edge_weight(edge)
            removed2 = set(removed)
            removed2.add(edge)
            removed2 = frozenset(removed2)

            if removed2 in examined:
                # print('do not consider')
                continue

            cycles = set([c for c in state.cycles if edge not in c])

            ss = State(cycles=cycles, weight=new_weight)
            if not cycles:
                current_solutions[removed2] = ss
                best_weight = min(best_weight, new_weight)
            else:
                if new_weight < best_weight:
                    current_partial_solutions[removed2] = ss

    solutions = list(current_solutions)
    weights = [current_solutions[k].weight for k in solutions]
    best = solutions[np.argmin(weights)]
    state = current_solutions[best]

    logger.debug("best: %s %s" % (best, state))
    return best
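
A hedged usage sketch on a toy graph (it assumes networkx and the surrounding mocdp module are available, and that simple_cycles_as_edges accepts a networkx DiGraph, as the import from mocdp.comp.connection suggests; the weights are made up):

    import networkx as nx

    G = nx.DiGraph()
    G.add_edges_from([('a', 'b'), ('b', 'a'), ('b', 'c'), ('c', 'b')])
    weights = {('a', 'b'): 1.0, ('b', 'a'): 2.0, ('b', 'c'): 3.0, ('c', 'b'): 1.5}

    best = enumerate_minimal_solution(G, edge_weight=lambda e: weights[e])
    # `best` is the cheapest set of edges whose removal leaves G acyclic;
    # here that should be {('a', 'b'), ('c', 'b')} with total weight 2.5.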