Example #1
import tensorflow as tf  # TF 1.x API (reset_default_graph, Session, assign)

# CtrModel and Solver are project-specific classes assumed to be importable
# from the surrounding repository.

def run(name, d):

    print("building {0} ...".format(name))

    tf.reset_default_graph()

    d.define_holders(from_restore=False)

    model = CtrModel(holder_dict=d.feats_holder_dict,
                     sparse_dim_dict=d.sparse_dim_dict,
                     methods_list=name,
                     drop_rate=0.5,
                     dnn_h_list=[[1024, 1024, 1024, 1024], [512, 512, 512]],
                     embed_base=40.,
                     embed_exp=1 / 4.)

    loss, opt = model.get_loss_and_minimizer(labels=d.labels_holder,
                                             clip_by=100.)

    solver = Solver()

    lr_assign = tf.assign(model.learning_rate_tensor, 1e-4)  # op to set the learning rate; run it inside the session

    with tf.Session() as sess:
        # write graph
        writer = tf.summary.FileWriter('./logs/' + name, sess.graph)
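        # --- The original example is truncated at this point. ---
        # A plausible continuation, sketched from the pattern in Example #2;
        # the exact solver.train arguments used here are assumptions.
        sess.run(tf.global_variables_initializer())
        sess.run(lr_assign)  # apply the 1e-4 learning-rate override defined above
        train_loss_hist, valid_loss_hist = solver.train(
            sess=sess,
            loss=loss,
            logits=model.logits,
            pred=model.predicts,
            learning_rate_tensor=model.learning_rate_tensor,
            data=d,
            minimizer=opt,
            phase=model.phase_tensor,
            global_step_tensor=model.global_step_tensor)
        writer.close()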
Example #2
import tensorflow as tf  # TF 1.x API

# CtrModel and Solver are project-specific classes assumed to be importable
# from the surrounding repository.

def run(name, d, _list):
    for idx, struct in enumerate(_list):
        print("building model {0} with {1}".format(name, struct))

        tf.reset_default_graph()

        d.define_holders(from_restore=False)

        model = CtrModel(
            holder_dict=d.feats_holder_dict,
            sparse_dim_dict=d.sparse_dim_dict,
            methods_list=[name],
            dcn_degree=struct,
            drop_rate=None,
            dnn_h_list=struct,
            dnn_act_fn=tf.nn.relu,
            embed_w_scale=1e2,
            nn_w_scale=1e2,
            # nn_b_scale=1e2,
            lr_w_scale=1e2,
            # lr_b_scale=1e2,
            embed_base=10.,
            embed_exp=0.)

        loss, opt = model.get_loss_and_minimizer(labels=d.labels_holder,
                                                 method="momentum",
                                                 clip_by=50.)

        solver = Solver()

        d.with_train = False

        with tf.Session() as sess:
            # write graph
            # writer = tf.summary.FileWriter('./logs/'+name, sess.graph)
            sess.run(tf.global_variables_initializer())
            train_loss_hist, valid_loss_hist = solver.train(
                sess=sess,
                loss=loss,
                logits=model.logits,
                pred=model.predicts,
                learning_rate_tensor=model.learning_rate_tensor,
                data=d,
                minimizer=opt,
                auto_stop=True,
                phase=model.phase_tensor,
                global_step_tensor=model.global_step_tensor,
                learning_rate_init=1e-4,
                learning_rate_min=1e-6,
                save_path="./results/0403/{0}/".format(name),
                save_valid_loss=0.0820,
                with_ema=False,
                max_train_step=10000,
                tol=1,
                log_period=100)
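
A hypothetical driver for Example #2, for illustration only; `DataSet` stands in for whichever project object actually provides define_holders(), feats_holder_dict, sparse_dim_dict, labels_holder and with_train:

# Illustration only: DataSet and the layer structures below are assumptions,
# not part of the original examples.
from data import DataSet  # hypothetical project module

if __name__ == "__main__":
    d = DataSet()
    # one model is built and trained per hidden-layer structure in the list
    run("dnn", d, [[512, 512], [1024, 512, 256]])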