Example #1
class SchedulerBaseTest(DependencyBaseTest):
    def setup_class(self):
        # Map the type names used in test traces to their array descriptors.
        self.namespace = {
            'double': self.floatarraydescr,
            'float': self.float32arraydescr,
            'long': self.arraydescr,
            'int': self.int32arraydescr,
            'short': self.int16arraydescr,
            'char': self.chararraydescr,
        }

    def setup_method(self, name):
        # Give each test a fresh vector extension with 16-byte vector registers.
        self.vector_ext = VectorExt()
        self.vector_ext.enable(16, True)

    def pack(self, loop, l, r, input_type=None, output_type=None):
        # Wrap the dependency-graph nodes in the half-open range [l, r) into a Pack.
        return Pack(loop.graph.nodes[l:r])

    def schedule(self,
                 loop,
                 packs,
                 vec_reg_size=16,
                 prepend_invariant=False,
                 overwrite_funcs=None):
        cm = GenericCostModel(self.cpu, 0)
        # Patch the cost model so scheduling is always considered profitable.
        cm.profitable = lambda: True
        # Split every pack into pairs of adjacent operations and collect them
        # into a fake pack set for the scheduler.
        pairs = []
        for pack in packs:
            for i in range(len(pack.operations) - 1):
                o1 = pack.operations[i]
                o2 = pack.operations[i + 1]
                pair = Pair(o1, o2)
                pairs.append(pair)
        packset = FakePackSet(pairs)
        state = VecScheduleState(loop.graph, packset, self.cpu, cm)
        # Allow individual tests to overwrite methods on the schedule state.
        for name, overwrite in (overwrite_funcs or {}).items():
            setattr(state, name, overwrite)
        renamer = Renamer()
        metainterp_sd = FakeMetaInterpStaticData(self.cpu)
        jitdriver_sd = FakeJitDriverStaticData()
        opt = VectorizingOptimizer(metainterp_sd, jitdriver_sd, 0)
        opt.packset = packset
        # Combine overlapping pairs into larger packs, then emit the schedule.
        opt.combine_packset()
        opt.schedule(state)
        # Works for now, but this might be the wrong class; otherwise wrap
        # label + operations + jump in a tree loop.
        loop = state.graph.loop
        if prepend_invariant:
            # Prepend the loop-invariant (prefix) operations to the scheduled body.
            loop.operations = loop.prefix + loop.operations
        return loop
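
For context, a test built on this base class typically parses a trace, wraps a slice of its dependency-graph nodes into a pack via pack(), and feeds that to schedule(). The sketch below is illustrative only: parse_trace and assert_equal are assumed helpers from the surrounding test harness (they do not appear in the code above), and the trace contents are left as placeholders.

class TestScheduleUsageSketch(SchedulerBaseTest):
    def test_schedule_two_adjacent_ops(self):
        # Hypothetical: parse a trace containing two adjacent, packable
        # operations (e.g. two float loads from consecutive indices).
        loop1 = self.parse_trace("...")               # assumed helper
        # Pack dependency-graph nodes [0, 2) into one candidate pack.
        pack1 = self.pack(loop1, 0, 2)
        # With the patched cost model, scheduling always proceeds.
        loop2 = self.schedule(loop1, [pack1], prepend_invariant=True)
        # Compare against the expected (vectorized) trace.
        expected = self.parse_trace("...")            # assumed helper
        self.assert_equal(loop2, expected)            # assumed helper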