def test_recursion():
    """A Lambda may call itself through base.apply: factorial evaluates to 6."""
    # switch(p0 > 1, 1, ...) picks index 0 (the literal 1) when p0 <= 1,
    # otherwise index 1, the recursive product.
    factorial_expr = scope.switch(
        p0 > 1,
        1,
        p0 * base.apply("Fact", p0 - 1),
    )
    scope.define(Lambda("Fact", [("x", p0)], expr=factorial_expr))
    print(scope.Fact(3))
    assert rec_eval(scope.Fact(3)) == 6
def test_switch_and_Raise():
    """switch() selects by index and only evaluates the branch it picks."""
    selector = Literal()
    expr = scope.switch(selector, 'a', 'b', scope.Raise(Exception))
    assert rec_eval(expr, memo={selector: 0}) == 'a'
    assert rec_eval(expr, memo={selector: 1}) == 'b'
    # The Raise node at index 2 fires only when that branch is selected.
    assert_raises(Exception, rec_eval, expr, memo={selector: 2})
def test_switch_and_Raise():
    """switch() selects by index and only evaluates the branch it picks."""
    idx = Literal()
    switched = scope.switch(idx, "a", "b", scope.Raise(Exception))
    for position, expected in ((0, "a"), (1, "b")):
        assert rec_eval(switched, memo={idx: position}) == expected
    # Selecting index 2 reaches the Raise node and must propagate Exception.
    assert_raises(Exception, rec_eval, switched, memo={idx: 2})
def pchoice(*options):
    """Like hp.pchoice, but without a label.

    Each option is a ``(probability, value)`` pair. Draws an index from
    the categorical distribution over the probabilities and switches to
    the corresponding value.
    """
    # BUG FIX: the body previously unpacked an undefined name `p_options`,
    # raising NameError on every call; the pairs arrive via *options.
    p, values = zip(*options)
    n_options = len(values)
    ch = scope.categorical(p, upper=n_options)
    return scope.switch(ch, *values)
def choice(*options):
    """Like hp.choice, but without a label."""
    # Uniformly pick one of the options via a random index.
    index = scope.randint(len(options))
    return scope.switch(index, *options)
def _hyperopt_space(model_name, total_samples):
    """Build the hyperopt search space for the given model.

    Parameters
    ----------
    model_name : expected to be CITATION_RANKER_MODEL or PAPER_EMBEDDING_MODEL;
        any other value raises ValueError.
    total_samples : passed through into the space unchanged.

    Returns
    -------
    A pyll expression (via scope.switch) that hyperopt samples
    configurations from.
    """
    use_pretrained = hp.choice('use_pretrained', [True, False])

    # Hyperparameters shared by both branches of the pretrained switch.
    common_param_space = {
        'total_samples': total_samples,
        'lr': hp.choice('lr', [0.1, 0.01, 0.001, 0.0001, 0.00001]),
        'l1_lambda': hp.choice('l1_lambda', np.append(np.logspace(-7, -2, 6), 0)),
        'dropout_p': hp.quniform('dropout_p', 0.0, 0.75, 0.05),
        'margin_multiplier': hp.choice('margin_multiplier', [0.5, 0.75, 1.0, 1.25, 1.5])
    }

    # Parameters that differ depending on whether pretrained embeddings
    # are used, keyed by the value of `use_pretrained`.
    pre_trained_params = {
        True: {
            'use_pretrained': True,
            'l2_lambda': 0,
            'dense_dim': 300,
            'enable_fine_tune': hp.choice('enable_fine_tune', [True, False])
        },
        False: {
            'use_pretrained': False,
            'l2_lambda': hp.choice('l2_lambda', np.append(np.logspace(-7, -2, 6), 0)),
            'dense_dim': scope.int(hp.quniform('dense_dim', 25, 325, 25)),
            'enable_fine_tune': True  # doesn't matter what goes here
        }
    }

    # the search space
    # note that the scope.int code is a hack to get integers out of the sampler
    # scope.switch(int(use_pretrained), ...) maps False -> index 0 (the
    # non-pretrained dict) and True -> index 1 (the pretrained dict).
    if model_name == CITATION_RANKER_MODEL:
        ranker_model_params = {
            'embedding_type': hp.choice('embedding_type', ['sum']),
            'metadata_dim': scope.int(hp.quniform('metadata_dim', 5, 55, 5)),
        }
        space = scope.switch(
            scope.int(use_pretrained),
            {
                **pre_trained_params[False],
                **common_param_space,
                **ranker_model_params,
            },
            {
                **pre_trained_params[True],
                **common_param_space,
                **ranker_model_params
            })
    elif model_name == PAPER_EMBEDDING_MODEL:
        space = scope.switch(scope.int(use_pretrained), {
            **pre_trained_params[False],
            **common_param_space
        }, {
            **pre_trained_params[True],
            **common_param_space
        })
    else:
        # Fail loudly on an unknown model name. The previous `assert False`
        # is stripped under `python -O` and carried no diagnostic message.
        raise ValueError(f"Unknown model_name: {model_name!r}")

    return space
def test_recursion():
    """A Lambda may call itself through base.apply: factorial evaluates to 6."""
    # Index 0 of the switch is the base case (1); index 1, taken while
    # p0 > 1, multiplies by the recursive application of 'Fact'.
    recursive_body = scope.switch(p0 > 1, 1, p0 * base.apply('Fact', p0 - 1))
    scope.define(Lambda('Fact', [('x', p0)], expr=recursive_body))
    print(scope.Fact(3))
    assert rec_eval(scope.Fact(3)) == 6