示例#1
0
def test_badoptimization_opt_err():
    # This variant of test_badoptimization() replace the working code
    # with a new apply node that will raise an error.
    @gof.local_optimizer([theano.tensor.add])
    def insert_bigger_b_add(node):
        # Rewrite ``a + b`` as ``a + concatenate((b, b))`` when ``b`` has no
        # owner (i.e. it is a graph input/constant).  This deliberately
        # creates a runtime shape mismatch that DebugMode must attribute
        # to this optimization by name.
        if node.op == theano.tensor.add:
            inputs = list(node.inputs)
            if inputs[-1].owner is None:
                inputs[-1] = theano.tensor.concatenate(
                    (inputs[-1], inputs[-1]))
                return [node.op(*inputs)]
        return False

    @gof.local_optimizer([theano.tensor.add])
    def insert_bad_dtype(node):
        # Replace the add's output with a float32 cast of itself -- an
        # illegal change of dtype that gof must report as BadOptimization.
        if node.op == theano.tensor.add:
            inputs = list(node.inputs)
            if inputs[-1].owner is None:

                return [node.outputs[0].astype("float32")]
        return False

    # Two independent optimizer databases, one per broken optimization.
    edb = gof.EquilibriumDB()
    edb.register("insert_bigger_b_add", insert_bigger_b_add, "all")
    opt = edb.query("+all")
    edb2 = gof.EquilibriumDB()
    edb2.register("insert_bad_dtype", insert_bad_dtype, "all")
    opt2 = edb2.query("+all")

    a = theano.tensor.dvector()
    b = theano.tensor.dvector()

    # The shape-breaking opt: the runtime error must name the culprit opt.
    f = theano.function([a, b], a + b, mode=debugmode.DebugMode(optimizer=opt))
    with pytest.raises(ValueError, match=r"insert_bigger_b_add"):
        f(
            [1.0, 2.0, 3.0],
            [2, 3, 4],
        )

    # Test that opt that do an illegal change still get the error from gof.
    with pytest.raises(theano.gof.toolbox.BadOptimization,
                       match=r"insert_bad_dtype") as einfo:
        with theano.change_flags(on_opt_error="raise"):
            f2 = theano.function(
                [a, b],
                a + b,
                mode=debugmode.DebugMode(optimizer=opt2, stability_patience=1),
            )
        f2(
            [1.0, 2.0, 3.0],
            [2, 3, 4],
        )

    # Test that we can reraise the error with an extended message
    with pytest.raises(theano.gof.toolbox.BadOptimization):
        e = einfo.value
        new_e = e.__class__("TTT" + str(e))
        exc_type, exc_value, exc_trace = sys.exc_info()
        exc_value = new_e
        reraise(e.__class__, exc_value, exc_trace)
示例#2
0
def test_stochasticoptimization():
    # DebugMode optimizes the graph several times (stability_patience) and
    # must notice that this local opt fires only on every other visit.
    fired_last_time = [False]

    @gof.local_optimizer([theano.tensor.add])
    def insert_broken_add_sometimes(node):
        # Alternate between replacing the add and leaving it untouched.
        if node.op != theano.tensor.add:
            return False
        fired_last_time[0] = not fired_last_time[0]
        if fired_last_time[0]:
            return [off_by_half(*node.inputs)]
        return False

    db = gof.EquilibriumDB()
    db.register("insert_broken_add_sometimes", insert_broken_add_sometimes,
                "all")
    flaky_opt = db.query("+all")

    x = theano.tensor.dvector()
    y = theano.tensor.dvector()

    # At least two stability passes are needed to observe the disagreement.
    unstable_mode = debugmode.DebugMode(
        optimizer=flaky_opt,
        check_c_code=True,
        stability_patience=max(2, config.DebugMode.patience),
    )
    with pytest.raises(debugmode.StochasticOrder):
        theano.function([x, y], theano.tensor.add(x, y), mode=unstable_mode)
示例#3
0
def test_badoptimization():
    # Register a single local opt that swaps every add for the deliberately
    # wrong ``off_by_half`` op; DebugMode must catch the bad results and
    # identify 'insert_broken_add' as the reason.
    @gof.local_optimizer([theano.tensor.add])
    def insert_broken_add(node):
        if node.op != theano.tensor.add:
            return False
        return [off_by_half(*node.inputs)]

    db = gof.EquilibriumDB()
    db.register('insert_broken_add', insert_broken_add, 'all')
    broken_opt = db.query('+all')

    x = theano.tensor.dvector()
    y = theano.tensor.dvector()
    fn = theano.function([x, y], x + y,
                         mode=debugmode.DebugMode(optimizer=broken_opt))

    try:
        fn(
            [1.0, 2.0, 3.0],
            [2, 3, 4],
        )
    except debugmode.BadOptimization as e:
        # The report must name the offending optimization.
        assert str(e.reason) == 'insert_broken_add'
    else:
        assert False  # the broken opt went undetected
示例#4
0
def test_stochasticoptimization():
    # Shared toggle: the opt fires on every second visit to an add node,
    # so successive optimization passes disagree with each other.
    fired = [False]

    @gof.local_optimizer([theano.tensor.add])
    def insert_broken_add_sometimes(node):
        if node.op != theano.tensor.add:
            return False
        fired[0] = not fired[0]
        if fired[0]:
            return [off_by_half(*node.inputs)]
        return False

    db = gof.EquilibriumDB()
    db.register('insert_broken_add_sometimes', insert_broken_add_sometimes,
                'all')
    flaky_opt = db.query('+all')

    x = theano.tensor.dvector()
    y = theano.tensor.dvector()

    try:
        theano.function([x, y],
                        theano.tensor.add(x, y),
                        mode=debugmode.DebugMode(optimizer=flaky_opt,
                                                 check_c_code=True))
    except debugmode.StochasticOrder:
        pass  # TEST PASS
    else:
        assert False
def test_badoptimization_opt_err():
    """Variant of test_badoptimization(): the replacement subgraph is valid
    at optimization time but raises an error when the function is executed.
    """
    @gof.local_optimizer([theano.tensor.add])
    def insert_bigger_b_add(node):
        # Double the length of the last operand (when it is a graph input)
        # so the add fails at runtime with a shape mismatch.
        if node.op != theano.tensor.add:
            return False
        new_inputs = list(node.inputs)
        if new_inputs[-1].owner is not None:
            return False
        new_inputs[-1] = theano.tensor.concatenate(
            (new_inputs[-1], new_inputs[-1]))
        return [node.op(*new_inputs)]

    db = gof.EquilibriumDB()
    db.register('insert_bigger_b_add', insert_bigger_b_add, 'all')
    bad_opt = db.query('+all')

    x = theano.tensor.dvector()
    y = theano.tensor.dvector()
    fn = theano.function([x, y], x + y,
                         mode=debugmode.DebugMode(optimizer=bad_opt))

    try:
        fn(
            [1.0, 2.0, 3.0],
            [2, 3, 4],
        )
    except Exception as e:
        # The runtime error must point at the optimization that caused it.
        assert 'insert_bigger_b_add' in exc_message(e)
    else:
        assert False
示例#6
0
def test_badoptimization():
    # A local opt replacing every add with the deliberately wrong
    # ``off_by_half`` op; DebugMode must flag it as a BadOptimization.
    @gof.local_optimizer([theano.tensor.add])
    def insert_broken_add(node):
        if node.op != theano.tensor.add:
            return False
        return [off_by_half(*node.inputs)]

    db = gof.EquilibriumDB()
    db.register("insert_broken_add", insert_broken_add, "all")
    broken_opt = db.query("+all")

    x = theano.tensor.dvector()
    y = theano.tensor.dvector()
    fn = theano.function([x, y], x + y,
                         mode=debugmode.DebugMode(optimizer=broken_opt))

    with pytest.raises(debugmode.BadOptimization) as einfo:
        fn([1.0, 2.0, 3.0], [2, 3, 4])
    # The report must name the culprit optimization.
    assert str(einfo.value.reason) == "insert_broken_add"
示例#7
0
File: mode.py  Project: zmdfwh/Theano
# This should only remove nodes.
# The opts registered here must not need shape inference; new nodes
# lacking infer_shape are only acceptable if the original node also
# lacked infer_shape.
local_useless = gof.optdb.LocalGroupDB(apply_all_opts=True, profile=True)
optdb.register(
    'useless',
    gof.optdb.TopoDB(local_useless,
                     failure_callback=gof.opt.NavigatorOptimizer.warn_inplace),
    0.6, 'fast_run', 'fast_compile')

optdb.register('merge1.1', gof.MergeOptimizer(), 0.65, 'fast_run',
               'fast_compile', 'merge')

# rearranges elemwise expressions
optdb.register('canonicalize', gof.EquilibriumDB(ignore_newtrees=False), 1,
               'fast_run', 'fast_compile', 'canonicalize_db')
# Register the merge opt inside the canonicalize Equilibrium as a cleanup
# opt.  Without this, since the equilibrium has ignore_newtrees=False, not
# all nodes would be merged if merge were set as a global optimizer with
# final_opt=True.

# A fresh MergeOptimizer instance is used so its name is not changed by
# other usages of a shared instance.
optdb['canonicalize'].register("merge",
                               gof.opt.MergeOptimizer(),
                               'fast_run',
                               "fast_compile",
                               cleanup=True)
optdb.register('merge1.2', gof.MergeOptimizer(), 1.2, 'fast_run',
示例#8
0
File: mode.py  Project: yubow/Theano
    given point.
    """
    def __init__(self, header):
        # Label printed before the graph dump in apply().
        self.header = header

    def apply(self, fgraph):
        # Debugging aid: print a banner followed by a debugprint of the
        # graph's outputs.  The graph itself is not modified.
        import theano.printing
        print "PrintCurrentFunctionGraph:", self.header
        theano.printing.debugprint(fgraph.outputs)


# Optimization pipeline: entries run in order of their numeric position.
optdb = gof.SequenceDB()
optdb.register('merge1', gof.MergeOptimizer(), 0, 'fast_run', 'fast_compile')

# rearranges elemwise expressions
optdb.register('canonicalize', gof.EquilibriumDB(), 1, 'fast_run',
               'fast_compile')

optdb.register('merge1.2', gof.MergeOptimizer(), 1.2, 'fast_run',
               'fast_compile')

# Debug printer; its tags are commented out so it is not picked up by the
# normal fast_run/fast_compile queries.
optdb.register(
    'Print1.21',
    PrintCurrentFunctionGraph('Post-canonicalize'),
    1.21,
)  # 'fast_run', 'fast_compile')

# replace unstable subgraphs
optdb.register('stabilize', gof.EquilibriumDB(), 1.5, 'fast_run')
optdb.register(
示例#9
0
    gof.optdb.TopoDB(
        local_useless, failure_callback=gof.opt.NavigatorOptimizer.warn_inplace
    ),
    0.6,
    "fast_run",
    "fast_compile",
)

optdb.register(
    "merge1.1", gof.MergeOptimizer(), 0.65, "fast_run", "fast_compile", "merge"
)

# rearranges elemwise expressions
optdb.register(
    "canonicalize",
    gof.EquilibriumDB(ignore_newtrees=False),
    1,
    "fast_run",
    "fast_compile",
    "canonicalize_db",
)
# Register the merge opt inside the canonicalize Equilibrium as a cleanup
# opt.  Without this, since the equilibrium has ignore_newtrees=False, not
# all nodes would be merged if merge were set as a global optimizer with
# final_opt=True.

# A fresh MergeOptimizer instance is used so its name is not changed by
# other usages of a shared instance.
optdb["canonicalize"].register(
    "merge", gof.opt.MergeOptimizer(), "fast_run", "fast_compile", cleanup=True
)
示例#10
0
File: mode.py  Project: yanyan-cas/Theano
    """
    def __init__(self, header):
        # Label printed before the graph dump in apply().
        self.header = header

    def apply(self, fgraph):
        # Debugging aid: print a banner followed by a debugprint of the
        # graph's outputs.  The graph itself is not modified.
        import theano.printing
        print("PrintCurrentFunctionGraph:", self.header)
        theano.printing.debugprint(fgraph.outputs)


# Optimization pipeline: entries run in order of their numeric position.
optdb = gof.SequenceDB()
optdb.register('merge1', gof.MergeOptimizer(),
               0, 'fast_run', 'fast_compile', 'merge')

# rearranges elemwise expressions
optdb.register('canonicalize', gof.EquilibriumDB(ignore_newtrees=False),
               1, 'fast_run', 'fast_compile', 'canonicalize_db')
# Register the merge opt inside the canonicalize Equilibrium as a cleanup
# opt.  Without this, since the equilibrium has ignore_newtrees=False, not
# all nodes would be merged if merge were set as a global optimizer with
# final_opt=True.

# A fresh MergeOptimizer instance is used so its name is not changed by
# other usages of a shared instance.
optdb['canonicalize'].register("merge", gof.opt.MergeOptimizer(), 'fast_run',
                               "fast_compile", cleanup=True)

optdb.register('merge1.2', gof.MergeOptimizer(),
               1.2, 'fast_run', 'fast_compile', 'merge')

optdb.register('Print1.21', PrintCurrentFunctionGraph('Post-canonicalize'),
示例#11
0
    """
    def __init__(self, header):
        # Label printed before the graph dump in apply().
        self.header = header

    def apply(self, fgraph):
        # Debugging aid: print a banner followed by a debugprint of the
        # graph's outputs.  The graph itself is not modified.
        import theano.printing
        print "PrintCurrentFunctionGraph:", self.header
        theano.printing.debugprint(fgraph.outputs)


# Optimization pipeline: entries run in order of their numeric position.
optdb = gof.SequenceDB()
optdb.register('merge1', gof.MergeOptimizer(),
        0, 'fast_run', 'fast_compile', 'merge')

# rearranges elemwise expressions
optdb.register('canonicalize', gof.EquilibriumDB(),
        1, 'fast_run', 'fast_compile')

optdb.register('merge1.2', gof.MergeOptimizer(),
        1.2, 'fast_run', 'fast_compile', 'merge')

# Debug printer; its tags are commented out so the normal queries skip it.
optdb.register('Print1.21', PrintCurrentFunctionGraph('Post-canonicalize'),
        1.21,)  # 'fast_run', 'fast_compile')

# replace unstable subgraphs
optdb.register('stabilize', gof.EquilibriumDB(),
        1.5, 'fast_run')

optdb.register('Print1.51', PrintCurrentFunctionGraph('Post-stabilize'),
        1.51,)  # 'fast_run', 'fast_compile')
示例#12
0
def test_badoptimization_opt_err():
    # This variant of test_badoptimization() replace the working code
    # with a new apply node that will raise an error.
    @gof.local_optimizer([theano.tensor.add])
    def insert_bigger_b_add(node):
        # Rewrite ``a + b`` as ``a + concatenate((b, b))`` when ``b`` has no
        # owner (i.e. it is a graph input/constant) -- deliberately creates
        # a runtime shape mismatch attributable to this opt.
        if node.op == theano.tensor.add:
            inputs = list(node.inputs)
            if inputs[-1].owner is None:
                inputs[-1] = theano.tensor.concatenate(
                    (inputs[-1], inputs[-1]))
                return [node.op(*inputs)]
        return False

    @gof.local_optimizer([theano.tensor.add])
    def insert_bad_dtype(node):
        # Replace the add's output with a float32 cast of itself -- an
        # illegal dtype change that gof must report as BadOptimization.
        if node.op == theano.tensor.add:
            inputs = list(node.inputs)
            if inputs[-1].owner is None:

                return [node.outputs[0].astype('float32')]
        return False

    # Two independent optimizer databases, one per broken optimization.
    edb = gof.EquilibriumDB()
    edb.register('insert_bigger_b_add', insert_bigger_b_add, 'all')
    opt = edb.query('+all')
    edb2 = gof.EquilibriumDB()
    edb2.register('insert_bad_dtype', insert_bad_dtype, 'all')
    opt2 = edb2.query('+all')

    a = theano.tensor.dvector()
    b = theano.tensor.dvector()

    f = theano.function([a, b], a + b, mode=debugmode.DebugMode(optimizer=opt))
    try:
        f(
            [1.0, 2.0, 3.0],
            [2, 3, 4],
        )
    except ValueError as e:
        # The runtime error must name the culprit optimization.
        assert 'insert_bigger_b_add' in exc_message(e)
    else:
        assert False

    # Test that opt that do an illegal change still get the error from gof.
    try:
        with theano.change_flags(on_opt_error='raise'):
            f2 = theano.function([a, b],
                                 a + b,
                                 mode=debugmode.DebugMode(
                                     optimizer=opt2, stability_patience=1))
        f2(
            [1.0, 2.0, 3.0],
            [2, 3, 4],
        )
    except theano.gof.toolbox.BadOptimization as e:
        assert 'insert_bad_dtype' in str(e)
        # Test that we can reraise the error with an extended message
        try:
            new_e = e.__class__("TTT" + str(e))
            exc_type, exc_value, exc_trace = sys.exc_info()
            exc_value = new_e
            reraise(e.__class__, exc_value, exc_trace)
        except theano.gof.toolbox.BadOptimization as e:
            pass
        else:
            assert False
    else:
        assert False
示例#13
0
class PrintCurrentEnv(gof.Optimizer):
    """This optimizer is for debugging.

    Toss it into the optimization pipeline to see the state of things at any
    given point.  It prints a banner plus a debugprint of the env's outputs
    and does not modify the graph.
    """
    def __init__(self, header):
        # Label printed before the graph dump in apply().
        self.header =header
    def apply(self, env):
        # Dump the current graph outputs; purely informational.
        import theano.printing
        print "PrintCurrentEnv:", self.header
        theano.printing.debugprint(env.outputs)

# Optimization pipeline: entries run in order of their numeric position.
# The Print* entries have their tags commented out so normal queries skip
# them; uncomment the tags to trace the graph between stages.
optdb = gof.SequenceDB()
optdb.register('merge1', gof.MergeOptimizer(),
        0, 'fast_run', 'fast_compile')
optdb.register('canonicalize', gof.EquilibriumDB(),         # rearranges elemwise expressions
        1, 'fast_run', 'fast_compile')
optdb.register('merge1.2', gof.MergeOptimizer(skip_const_merge=False),
        1.2, 'fast_run', 'fast_compile')
optdb.register('Print1.21', PrintCurrentEnv('Post-canonicalize'),
        1.21,)  # 'fast_run', 'fast_compile')

optdb.register('stabilize', gof.EquilibriumDB(),            # replace unstable subgraphs
        1.5, 'fast_run')
optdb.register('Print1.51', PrintCurrentEnv('Post-stabilize'),
        1.51,)  # 'fast_run', 'fast_compile')
optdb.register('specialize', gof.EquilibriumDB(),           # misc special cases for speed
        2, 'fast_run')
optdb.register('Print2.01', PrintCurrentEnv('Post-specialize'),
        2.01, )  # 'fast_run', 'fast_compile')
optdb.register('uncanonicalize', gof.EquilibriumDB(),# misc special cases for speed that break canonicalization