def get_rnn_sum(input_seq, name='cha', nameln='sec'):
    # Stacked bidirectional GRU over the input sequence, followed by a
    # residual layer norm and mean pooling over the time axis.
    HIDDEN_DIM = 128
    KEEP_PROB = 0.8
    with tf.name_scope('cell'):
        def build_cell(n, m):
            cell = tf.nn.rnn_cell.GRUCell(n)
            cell = tf.nn.rnn_cell.DropoutWrapper(cell, output_keep_prob=m)
            return cell

        # Each direction gets half the hidden size, so the concatenated
        # forward/backward output depth matches HIDDEN_DIM.
        num_units = [HIDDEN_DIM // 2, HIDDEN_DIM // 2]

        cell_fw = [build_cell(n, KEEP_PROB) for n in num_units]
        cell_bw = [build_cell(n, KEEP_PROB) for n in num_units]

    with tf.name_scope('gru'), tf.variable_scope("gru", reuse=tf.AUTO_REUSE):
        biout, output_fw, output_bw = tf.contrib.rnn.stack_bidirectional_dynamic_rnn(
            cell_fw, cell_bw, inputs=input_seq, dtype=tf.float32, scope=name)

        # Should `name` here match the one above, or differ per call site?
        # Maybe add a second name argument (name2).

        # lnout = tf.contrib.layers.layer_norm(
        #     inputs=(biout + input_seq), begin_norm_axis=-1, begin_params_axis=-1, scope=nameln)

    # Residual connection + layer norm, then mean-pool over time.
    lnout = tf.contrib.layers.layer_norm(
        inputs=(biout + input_seq), begin_norm_axis=-1, begin_params_axis=-1)

    rnnoutput = tf.reduce_mean(lnout, axis=-2)

    return rnnoutput

    def build_fcn_net(self, inp, use_dice=False):
        with self.graph.as_default():
            self.saver = tf.train.Saver(max_to_keep=1)


            with tf.name_scope("Out"):
                bn1 = tf.layers.batch_normalization(inputs=inp, name='bn1')
                dnn1 = tf.layers.dense(bn1, 200, activation=None, name='f1')
                if use_dice:
                    dnn1 = dice(dnn1, name='dice_1')
                else:
                    dnn1 = prelu(dnn1, 'prelu1')

                dnn2 = tf.layers.dense(dnn1, 80, activation=None, name='f2')
                if use_dice:
                    dnn2 = dice(dnn2, name='dice_2')
                else:
                    dnn2 = prelu(dnn2, 'prelu2')
                dnn3 = tf.layers.dense(dnn2, 2, activation=None, name='f3')
                self.y_hat = tf.nn.softmax(dnn3) + 1e-8  # epsilon keeps tf.log finite

            with tf.name_scope('Metrics'):
                # Cross-entropy loss and optimizer initialization
                # 'core_type_ph': [1, 1, 0,..],

                ctr_loss = - tf.reduce_mean(tf.log(self.y_hat) * self.target_ph)
                self.loss = ctr_loss
                # tf.summary.scalar('loss', self.loss)
                self.optimizer = tf.train.AdamOptimizer(learning_rate=self.lr_ph).minimize(self.loss)
                # self.optimizer = tf.train.GradientDescentOptimizer(learning_rate=self.lr_ph).minimize(self.loss)
                # Accuracy metric
                self.accuracy = tf.reduce_mean(tf.cast(tf.equal(tf.round(self.y_hat), self.target_ph), tf.float32))
                # tf.summary.scalar('accuracy', self.accuracy)

            self.merged = tf.summary.merge_all()
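
# Hedged sketch of one training step against the graph built above
# (assumptions, not part of the original listing: `model` is an instance of
# the surrounding class with build_fcn_net already called, `feed` maps every
# input placeholder to a batch, and `batch_labels` is a [batch, 2] one-hot array).
with model.graph.as_default():
    init_op = tf.global_variables_initializer()
with tf.Session(graph=model.graph) as sess:
    sess.run(init_op)
    feed[model.target_ph] = batch_labels
    feed[model.lr_ph] = 1e-3
    _, loss, acc = sess.run([model.optimizer, model.loss, model.accuracy],
                            feed_dict=feed)
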
def get_rnn_sum(input_seq, name='cha'):
    # Variant of get_rnn_sum above: same stacked bidirectional GRU, but the
    # outputs are sum-pooled over time with no residual layer norm, so the
    # input depth does not have to match HIDDEN_DIM.
    HIDDEN_DIM = 128
    KEEP_PROB = 0.8
    with tf.name_scope('cell'):

        def build_cell(n, m):
            cell = tf.nn.rnn_cell.GRUCell(n)
            cell = tf.nn.rnn_cell.DropoutWrapper(cell, output_keep_prob=m)
            return cell

        num_units = [HIDDEN_DIM // 2, HIDDEN_DIM // 2]

        cell_fw = [build_cell(n, KEEP_PROB) for n in num_units]
        cell_bw = [build_cell(n, KEEP_PROB) for n in num_units]

    with tf.name_scope('gru'):
        biout, output_fw, output_bw = tf.contrib.rnn.stack_bidirectional_dynamic_rnn(
            cell_fw, cell_bw, inputs=input_seq, dtype=tf.float32, scope=name)

        rnnoutput = tf.reduce_sum(biout, axis=-2)

    return rnnoutput
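
# A hedged usage sketch (not from the original listing): `seq` stands in for
# an embedded id sequence such as today_chapters_embedded. Its depth is 128,
# which also satisfies the first variant's residual add (biout + input_seq),
# since that requires input depth == HIDDEN_DIM.
import numpy as np

seq = tf.placeholder(tf.float32, [None, None, 128], name='seq_demo')
pooled = get_rnn_sum(seq, name='rnn_demo')  # -> [batch, 128]
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    out = sess.run(pooled, {seq: np.zeros((2, 5, 128), np.float32)})
    print(out.shape)  # (2, 128)
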
Example #4
 def __init__(self, dim, samples
              , init_w= tf.random_uniform_initializer(minval= -0.01, maxval= 0.01)
              , ftype= tf.float32, scope= 'dbn'):
     self.dim, self.ftype = dim, ftype
     with tf.variable_scope(scope):
         self.rbm = tuple(
             Rbm(scope= "rbm{}".format(i)
                 , dim_v= dim_v
                 , dim_h= dim_h
                 , samples= samples
                 , init_w= init_w
                 , ftype= self.ftype)
             for i, (dim_v, dim_h) in enumerate(zip(dim, dim[1:]), 1))
         self.w = tuple(rbm.w for rbm in self.rbm[::-1])
         self.wg = tuple(tf.transpose(w) for w in self.w)
         self.wr = tuple(
             tf.get_variable(name= "wr{}".format(i), shape= (dim_d, dim_a), initializer= init_w)
             for i, (dim_d, dim_a) in enumerate(zip(self.dim, self.dim[1:]), 1))
         self.lr_ = tf.placeholder(name= 'lr_', dtype= self.ftype, shape= ())
         # wake
         self.v_ = self.rbm[0].v_
         with tf.name_scope('wake'):
             recogn = [self.v_]
             for w in self.wr: recogn.append(binary(tf.matmul(recogn[-1], w)))
             self.recogn = tuple(recogn)
             recogn = recogn[::-1]
             eps = self.lr_ / tf.cast(tf.shape(self.v_)[0], dtype= self.ftype)
             self.wake = tuple(
                 w.assign_add(tf.matmul((sj - pj), sk, transpose_a= True) * eps).op
                 for w, sk, sj, pj in zip(
                         self.w, recogn, recogn[1:]
                         , (tf.sigmoid(tf.matmul(s, w))
                            for w, s in zip(self.wg, recogn))))
         # sleep
         top = self.rbm[-1]
         self.k_, (self.v, self.a) = top.k_, top.gibbs
         with tf.name_scope('sleep'):
             recons = [self.a, self.v]
             for w in self.wg[1::]: recons.append(binary(tf.matmul(recons[-1], w)))
             self.recons = tuple(recons)
             recons = recons[::-1]
             eps = self.lr_ / tf.cast(tf.shape(self.a)[0], dtype= self.ftype)
             self.sleep = tuple(
                 w.assign_add(tf.matmul(sj, (sk - qk), transpose_a= True) * eps).op
                 for w, sj, sk, qk in zip(
                         self.wr, recons, recons[1:]
                         , (tf.sigmoid(tf.matmul(s, w))
                            for w, s in zip(self.wr, recons))))
         # the waking world is the amnesia of dream.
         self.v = self.recons[-1]
         with tf.name_scope('ances'):
             self.a = self.rbm[-1].h
             ances = [self.a]
             for w in self.wg: ances.append(binary(tf.matmul(ances[-1], w)))
             self.ances = ances[-1]
         self.step = 0
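
# Hedged wake-sleep training sketch for the snippet above (assumptions: the
# class is named Dbn, `batch` is a {0,1}-valued array of shape (n, dim[0]),
# and Rbm/binary/binary_variable come from the same module).
dbn = Dbn(dim=(784, 256, 64), samples=32)
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    sess.run(dbn.wake, {dbn.v_: batch, dbn.lr_: 0.01})  # update generative weights w
    sess.run(dbn.sleep, {dbn.k_: 1, dbn.lr_: 0.01})     # run the top RBM chain, update recognition wr
    dbn.step += 1
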
Example #5
    def __init__(self, dim_v, dim_h, samples
                 , init_w= tf.random_uniform_initializer(minval= -0.01, maxval= 0.01)
                 , ftype= tf.float32
                 , scope= 'rbm'):
        self.dim_v, self.dim_h, self.ftype, self.scope = dim_v, dim_h, ftype, scope
        with tf.variable_scope(scope):
            # todo add bias
            self.w = tf.get_variable(name= 'w', shape= (self.dim_v, self.dim_h), initializer= init_w)
            # positive stage: inference
            self.v_ = tf.placeholder(name= 'v_', dtype= self.ftype, shape= (None, self.dim_v))
            with tf.name_scope('hgv'):
                self.hgv = tf.sigmoid(tf.matmul(self.v_, self.w))
            # self.act_h = binary(self.hgv, transform= False, threshold= None)
            # self.h_ = tf.placeholder(name= 'h_', dtype= self.ftype, shape= (None, self.dim_h))
            # self.vgh = tf.matmul(self.h_, self.w, transpose_b= True)
            # self.act_v = binary(self.vgh, transform= False, threshold= None)

            with tf.name_scope('pos'):
                self.pos = tf.matmul(self.v_, self.hgv, transpose_a= True)
                self.pos /= tf.cast(tf.shape(self.v_)[0], dtype= self.ftype)
            # negative stage: stochastic approximation
            self.v = binary_variable(name= 'v', shape= (samples, self.dim_v), dtype= self.ftype)
            self.h = binary_variable(name= 'h', shape= (samples, self.dim_h), dtype= self.ftype)
            self.k_ = tf.placeholder(name= 'k_', dtype= tf.int32, shape= ())

            def gibbs(v, _h):
                h = binary(tf.matmul(v, self.w))
                v = binary(tf.matmul(h, self.w, transpose_b= True))
                # todo real valued v
                # v = tf.sigmoid(tf.matmul(h, self.w, transpose_b= True))
                return v, h

            with tf.name_scope('gibbs'):
                vh = self.v, self.h
                v, h = self.gibbs = tuple(
                    tf.assign(x, x2, validate_shape= False) for x, x2 in zip(
                        vh, tf.while_loop(
                            loop_vars= (self.k_, vh)
                            , cond= lambda k, vh: (0 < k)
                            , body= lambda k, vh: (k - 1, gibbs(*vh)))[1]))

            with tf.name_scope('neg'):
                # todo update with real probabilities instead of binaries
                h = tf.sigmoid(tf.matmul(v, self.w))
                v = tf.sigmoid(tf.matmul(h, self.w, transpose_b= True))
                self.neg = tf.matmul(v, h, transpose_a= True)
                self.neg /= tf.cast(tf.shape(self.v)[0], dtype= self.ftype)
            self.lr_ = tf.placeholder(name= 'lr_', dtype= self.ftype, shape= ())
            with tf.name_scope('up'):
                self.up = self.w.assign_add((self.pos - self.neg) * self.lr_).op
            self.step = 0
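
# Hedged persistent-chain training sketch for the Rbm above (`batch` is an
# assumed {0,1}-valued array of shape (n, dim_v)).
rbm = Rbm(dim_v=784, dim_h=128, samples=32)
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for _ in range(100):
        sess.run(rbm.gibbs, {rbm.k_: 1})                   # advance the fantasy particles
        sess.run(rbm.up, {rbm.v_: batch, rbm.lr_: 0.01})   # w += (pos - neg) * lr
        rbm.step += 1
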
Example #6
 def __init__(self, dim, samples
              , init_w= tf.random_uniform_initializer(minval= -0.01, maxval= 0.01)
              , ftype= tf.float32, scope= 'sbn'):
     self.dim, self.ftype, self.scope = dim, ftype, scope
     with tf.variable_scope(scope):
         self.wr = tuple(
             tf.get_variable(name= "wr{}".format(i), shape= (dim_d, dim_a), initializer= init_w)
             for i, (dim_d, dim_a) in enumerate(zip(self.dim, self.dim[1:]), 1))
         self.wg = tuple(
             tf.get_variable(name= "wg{}".format(i), shape= (dim_a, dim_d), initializer= init_w)
             for i, (dim_d, dim_a) in enumerate(zip(self.dim, self.dim[1:]), 1))[::-1]
         self.lr_ = tf.placeholder(name= 'lr_', dtype= self.ftype, shape= ())
         # wake
         self.v_ = tf.placeholder(name= 'v_', dtype= self.ftype, shape= (None, self.dim[0]))
         with tf.name_scope('wake'):
             recogn = [self.v_]
             for w in self.wr: recogn.append(binary(tf.matmul(recogn[-1], w)))
             self.recogn = tuple(recogn)
             recogn = recogn[::-1]
             eps = self.lr_ / tf.cast(tf.shape(self.v_)[0], dtype= self.ftype)
             self.wake = tuple(
                 w.assign_add(tf.matmul(sk, (sj - pj), transpose_a= True) * eps).op
                 for w, sk, sj, pj in zip(
                         self.wg, recogn, recogn[1:]
                         , (tf.sigmoid(tf.matmul(s, w))
                            for w, s in zip(self.wg, recogn))))
         # sleep
         with tf.name_scope('a'):
             self.a = tf.round(tf.random_uniform(shape= (samples, self.dim[-1])))
         with tf.name_scope('sleep'):
             recons = [self.a]
             for w in self.wg: recons.append(binary(tf.matmul(recons[-1], w)))
             self.recons = tuple(recons)
             recons = recons[::-1]
             eps = self.lr_ / tf.cast(tf.shape(self.a)[0], dtype= self.ftype)
             self.sleep = tuple(
                 w.assign_add(tf.matmul(sj, (sk - qk), transpose_a= True) * eps).op
                 for w, sj, sk, qk in zip(
                         self.wr, recons, recons[1:]
                         , (tf.sigmoid(tf.matmul(s, w))
                            for w, s in zip(self.wr, recons))))
         # the waking world is the amnesia of dream.
         self.v = self.recons[-1]
         self.step = 0
    def __init__(self, *, use_dice=False):
        super().__init__(use_dice=use_dice)
        self.other_inputs()

        teacher = [
            #self.teacher_id_embedded,
            self.province_id_embedded,
            self.city_id_embedded,
            self.core_type_embedded,
            self.student_count_embedded, ]
        # 0-4
        clazz = [
            #self.class_id_embedded,
            self.edition_id_embedded,
            self.grade_id_embedded,
            self.class_student_embedded,
            self.cap_avg_embedded,
            self.cap_max_embedded,
            self.cap_min_embedded, ]
        # 5-11
        study = [
            self.study_vector_embedded,
            self.gap_days_embedded, ]
        # 12-13
        submit = [self.month_submit_rate_embedded,
                  ]
        # 14
        capacity = [self.region_capacity_embedded,
                    ]
        # 15
        prefer = [self.prefer_assign_time_avg_embedded,
                  self.prefer_assign_time_var_embedded,
                  self.prefer_assign_rank_avg_embedded,
                  self.prefer_assign_rank_var_embedded, ]
        # 16-19
        register = [self.register_diff_embedded,
                    ]
        # 20
        homeworkcount = [self.homework_count_embedded,
                         ]
        # 21
        weekcount = [self.week_count_embedded,
                     ]
        # 22
        lastday = [self.lastday_count_embedded,
                   ]
        # 23
        study_analysis = [self.analysis_avg_times_embedded,
                          self.analysis_avg_exp_score_embedded,
                          self.analysis_avg_rate_embedded,
                          self.analysis_avg_exp_level_embedded, ]


        o = teacher + clazz + study + submit + \
            capacity + prefer + register + homeworkcount + weekcount + lastday+study_analysis

        # Concatenate pairwise rather than tf.concat(o, axis=-1) because of
        # https://github.com/tensorflow/tensorflow/issues/24816.
        # Note: style, homework, and reflect do not need this workaround (verified).
        others = o[0]
        for i in o[1:]:
            others = tf.concat([others, i], axis=-1)
        others = [others]

        # style = []
        # for fir in ["1", "2", "3", "4"]:
        #     for sec in ["100", "010", "001", "110", "101", "011", "111"]:
        #         embed_key = "style_" + fir + "0" + sec + "_embedded"
        #         style.append(getattr(self, embed_key))

        homework = []
        homework.append(self.today_style_embedded)
        homework.append(tf.concat([self.today_cha_rnn, self.today_sec_rnn], axis=-1))
        homework.append(self.history_chap_embedded)
        homework.append(self.history_chap_embedded * tf.concat(
            [self.today_cha_rnn, self.today_sec_rnn, self.today_style_embedded], axis=-1))

        reflect = []
        reflect.append(self.ref_rnn)

        with self.graph.as_default():
            with tf.name_scope("Concat"):
                inps = tf.concat(others + homework + reflect, -1)
        self.build_fcn_net(inps, self.use_dice)
    def __init__(self, *, use_dice=False):
        self.graph = tf.Graph()
        self.tensor_info = {}
        self.use_dice = use_dice

        with self.graph.as_default():
            # Main Inputs
            with tf.name_scope('Main_Inputs'):

                self.target_ph = tf.placeholder(tf.float32, [None, None], name='target_ph')
                self.lr_ph = tf.placeholder(tf.float32, [], name="lr_ph")

                # with tf.name_scope("Teacher_Info"):
                self.teacher_id_ph = tf.placeholder(tf.int32, [None, ], name="teacher_id_ph")
                self.student_count_ph = tf.placeholder(tf.int32, [None, ], name="student_count_ph")
                self.province_id_ph = tf.placeholder(tf.int32, shape=[None, ], name="province_id_ph")
                self.city_id_ph = tf.placeholder(tf.int32, shape=[None, ], name="city_id_ph")
                # TODO: binary: is the teacher a gold-medal lecturer?
                self.core_type_ph = tf.placeholder(tf.int32, shape=[None, ], name="core_type_ph")

                # with tf.name_scope("Class_Info"):
                # id of the class taught today
                self.class_id_ph = tf.placeholder(tf.int32, [None, ], name="class_id_ph")
                # textbook edition
                self.edition_id_ph = tf.placeholder(tf.int32, [None, ], name="edition_id_ph")
                self.grade_id_ph = tf.placeholder(tf.int32, [None, ], name="grade_id_ph")
                # total number of students across all classes the teacher teaches; continuous int feature
                self.class_student_ph = tf.placeholder(tf.int32, [None, ], name="class_student_ph")
                # kefei
                # continuous float features
                self.cap_avg_ph = tf.placeholder(tf.float32, [None, ], name="cap_avg_ph")
                self.cap_max_ph = tf.placeholder(tf.float32, [None, ], name="cap_max_ph")
                self.cap_min_ph = tf.placeholder(tf.float32, [None, ], name="cap_min_ph")


                # with tf.name_scope("Homework_Info"):
                # Candidate recall set: Tianyu first provides a pre-filtered set of
                # homework groups, each containing many problems; ignore the problem
                # level for now. The granularity is one group, treated as a single
                # homework assignment with two feature attributes: chapters and sections.
                # If today's teacher assigned a given group its label is 1, the other
                # groups get 0; that is how the samples are constructed. chapters and
                # sections may each hold several ids (multi-label style), so they are
                # zero-padded to a uniform length.
                self.today_chapters_ph = tf.placeholder(tf.int32, [None, None], name="today_chapters_ph")
                self.today_sections_ph = tf.placeholder(tf.int32, [None, None], name="today_sections_ph")
                
                # unused
                self.today_chap_mask_ph = tf.placeholder(tf.float32, [None, None], name='today_chap_mask_ph')
                self.today_chap_len_ph = tf.placeholder(tf.int32, [None, ], name='today_chap_len_ph')
                self.today_sec_mask_ph = tf.placeholder(tf.float32, [None, None], name='today_sec_mask_ph')
                self.today_sec_len_ph = tf.placeholder(tf.int32, [None, ], name='today_sec_len_ph')
                # homework style: the style of this problem (preview, in-depth, ...)
                self.today_style_ph = tf.placeholder(tf.int32, [None, ], name='today_style_ph')

                # TODO: use three dims to capture more history info
                # homework assigned to this class on previous days (e.g. yesterday),
                # still represented by the two features chap and sec; each holds
                # several ids, hence the [None, None] shapes
                for fir in ['one', 'two', 'three', 'four','five','six','seven','eight','nine','ten','eleven','twelve','thirteen','fourteen']:
                    key_s = "history_" + fir + "_sec_ph"
                    sty = "style_" + fir + "_ph"
                    key_c = "history_" + fir + "_chap_ph"
                    setattr(self, key_c,
                                tf.placeholder(tf.int32, [None, None], name=key_c))
                    setattr(self, key_s,
                                tf.placeholder(tf.int32, [None, None], name=key_s))
                    setattr(self, sty,
                                tf.placeholder(tf.int32, [None,], name=sty))

                # TODO: All belows should consider the type and input
                # with tf.name_scope("Study_Info"):
                # TODO: study_vector_ph's type can change?
                # kefei: the class's learning ability (like a midterm exam); the class is represented as a 20-dim vector
                self.study_vector_ph = tf.placeholder(tf.float32, [None, 20], name="study_vector_ph")
                # when the vector above was measured: continuous value, days elapsed since
                self.gap_days_ph = tf.placeholder(tf.int32, [None, ], name="gap_days_ph")

                
                self.analysis_avg_times_ph = tf.placeholder(tf.float32, [None, ], name="analysis_avg_times_ph")
                self.analysis_avg_rate_ph = tf.placeholder(tf.float32, [None, ], name="analysis_avg_rate_ph")
                self.analysis_avg_exp_score_ph = tf.placeholder(tf.float32, [None, ], name="analysis_avg_exp_score_ph")
                self.analysis_avg_exp_level_ph = tf.placeholder(tf.float32, [None, ], name="analysis_avg_exp_level_ph")

                # with tf.name_scope("Submit_Info"):  这个班级 一个月的app内 作业提交率 ,连续float特征
                self.month_submit_rate_ph = tf.placeholder(tf.float32, [None, ], name="month_submit_rate_ph")

                # with tf.name_scope("Capacity_Info"):  地区区域整体能力   也是  float连续特征
                self.region_capacity_ph = tf.placeholder(tf.float32, [None, ], name="region_capacity_ph")

                # with tf.name_scope("Prefer_Info"):
                # the teacher's preferred homework difficulty and assignment time for this class; continuous float features
                self.prefer_assign_time_avg_ph = tf.placeholder(tf.float32, [None, ],
                                                                name="prefer_assign_time_avg_ph")
                self.prefer_assign_time_var_ph = tf.placeholder(tf.float32, [None, ],
                                                                name="prefer_assign_time_var_ph")
                self.prefer_assign_rank_avg_ph = tf.placeholder(tf.float32, [None, ],
                                                                name="prefer_assign_rank_avg_ph")
                self.prefer_assign_rank_var_ph = tf.placeholder(tf.float32, [None, ],
                                                                name="prefer_assign_rank_var_ph")

                # with tf.name_scope("Register_Info"):  老师 注册app的 时间,int连续值特征
                self.register_diff_ph = tf.placeholder(tf.int32, [None, ], name="register_diff_ph")

                # with tf.name_scope("HomeworkCount_Info"):  老师 布置了多少题目   int连续值特征
                #是总共布置的吗  从注册app???
                self.homework_count_ph = tf.placeholder(tf.int32, [None, ], name="homework_count_ph")

                # with tf.name_scope("Style_Info"):
                # TODO: use 3 dims; the teacher's homework style? one feature field
                # for fir in ["1", "2", "3", "4"]:
                #     for sec in ["100", "010", "001", "110", "101", "011", "111"]:
                #         key = "style_" + fir + "0" + sec + "_ph"
                #         setattr(self, key,
                #                 tf.placeholder(tf.int32, [None, ], name=key))

                # with tf.name_scope("WeekHomeworkCount_Info"):
                # homework the teacher assigned this week; a rate? why float?? continuous feature
                self.week_count_ph = tf.placeholder(tf.float32, [None, ], name="week_count_ph")

                # with tf.name_scope("Reflect_Info"):
                # TODO: explore a more graceful mapping of homework categories
                self.reflect_value_ph = tf.placeholder(tf.int32, [None, None], name="reflect_value_ph")
                self.reflect_mask_ph = tf.placeholder(tf.float32, [None, None], name="reflect_mask_ph")
                self.reflect_len_ph = tf.placeholder(tf.int32, [None, ], name="reflect_len_ph")

                # with tf.name_scope("Lastdat_Info"):  昨天布置的 个数  int连续实值
                self.lastday_count_ph = tf.placeholder(tf.int32, [None, ], name="lastday_count_ph")

            # Embedding layer
            with tf.name_scope('Main_Embedding_layer'):
                # almost done
                with tf.name_scope("Others"):
                    # teacher
                    with tf.name_scope("Teacher"):
                        self.teacher_id_embeddings_var = tf.get_variable("teacher_id_embeddings_var",
                                                                         [N_TEACHER, EMBEDDING_DIM], )
                        # tf.summary.histogram('teacher_id_embeddings_var', self.teacher_id_embeddings_var)
                        self.teacher_id_embedded = tf.nn.embedding_lookup(self.teacher_id_embeddings_var,
                                                                          self.teacher_id_ph, )

                        self.province_id_embeddings_var = tf.get_variable("province_id_embeddings_var",
                                                                          [N_PROVINCE, EMBEDDING_DIM])
                        # tf.summary.histogram('province_id_embeddings_var', self.province_id_embeddings_var)
                        self.province_id_embedded = tf.nn.embedding_lookup(self.province_id_embeddings_var,
                                                                           self.province_id_ph)

                        self.city_id_embeddings_var = tf.get_variable("city_id_embeddings_var",
                                                                      [N_CITY, EMBEDDING_DIM])
                        # tf.summary.histogram('city_id_embeddings_var', self.city_id_embeddings_var)
                        self.city_id_embedded = tf.nn.embedding_lookup(self.city_id_embeddings_var,
                                                                       self.city_id_ph)

                        self.core_type_embeddings_var = tf.get_variable("core_type_embeddings_var",
                                                                        [2, EMBEDDING_DIM])
                        # tf.summary.histogram('core_type_embeddings_var', self.core_type_embeddings_var)
                        self.core_type_embedded = tf.nn.embedding_lookup(self.core_type_embeddings_var,
                                                                         self.core_type_ph)
                        # just aliasing the raw feature as *_embedded; maybe tf.identity?
                        self.student_count_embedded = get_self_or_expand_dims(self.student_count_ph)

                    with tf.name_scope("Class"):
                        self.class_id_embeddings_var = tf.get_variable("class_id_embeddings_var",
                                                                       [N_CLASS, EMBEDDING_DIM])
                        # tf.summary.histogram('class_id_embeddings_var', self.class_id_embeddings_var)
                        self.class_id_embedded = tf.nn.embedding_lookup(self.class_id_embeddings_var,
                                                                        self.class_id_ph)

                        self.edition_id_embeddings_var = tf.get_variable("edition_id_embeddings_var",
                                                                         [N_EDITION, EMBEDDING_DIM])
                        # tf.summary.histogram('edition_id_embeddings_var', self.edition_id_embeddings_var)
                        self.edition_id_embedded = tf.nn.embedding_lookup(self.edition_id_embeddings_var,
                                                                          self.edition_id_ph)

                        self.grade_id_embeddings_var = tf.get_variable("grade_id_embeddings_var",
                                                                       [N_GRADE, EMBEDDING_DIM])
                        # tf.summary.histogram('grade_id_embeddings_var', self.grade_id_embeddings_var)
                        self.grade_id_embedded = tf.nn.embedding_lookup(self.grade_id_embeddings_var,
                                                                        self.grade_id_ph)
                        # just aliasing the raw feature as *_embedded; maybe tf.identity?
                        # continuous dense values are meaningful as-is and are fed in directly
                        self.class_student_embedded = get_self_or_expand_dims(self.class_student_ph)
                        self.cap_avg_embedded = get_self_or_expand_dims(self.cap_avg_ph)
                        self.cap_max_embedded = get_self_or_expand_dims(self.cap_max_ph)
                        self.cap_min_embedded = get_self_or_expand_dims(self.cap_min_ph)

                    with tf.name_scope("Study"):
                        # just aliasing the raw feature as *_embedded; maybe tf.identity?
                        self.study_vector_embedded = self.study_vector_ph
                        self.gap_days_embedded = get_self_or_expand_dims(self.gap_days_ph)
                    with tf.name_scope("Study_analysis"):
                        self.analysis_avg_times_embedded = get_self_or_expand_dims(self.analysis_avg_times_ph)
                        self.analysis_avg_rate_embedded = get_self_or_expand_dims(self.analysis_avg_rate_ph)
                        self.analysis_avg_exp_score_embedded = get_self_or_expand_dims(self.analysis_avg_exp_score_ph)
                        self.analysis_avg_exp_level_embedded = get_self_or_expand_dims(self.analysis_avg_exp_level_ph)


                    with tf.name_scope("Submit"):
                        # just aliasing the raw feature as *_embedded; maybe tf.identity?
                        self.month_submit_rate_embedded = get_self_or_expand_dims(self.month_submit_rate_ph)

                    with tf.name_scope("Capacity"):
                        # just aliasing the raw feature as *_embedded; maybe tf.identity?
                        self.region_capacity_embedded = get_self_or_expand_dims(self.region_capacity_ph)

                    with tf.name_scope("Prefer"):
                        # just aliasing the raw feature as *_embedded; maybe tf.identity?
                        self.prefer_assign_time_avg_embedded = get_self_or_expand_dims(
                            self.prefer_assign_time_avg_ph)
                        self.prefer_assign_time_var_embedded = get_self_or_expand_dims(
                            self.prefer_assign_time_var_ph)
                        self.prefer_assign_rank_avg_embedded = get_self_or_expand_dims(
                            self.prefer_assign_rank_avg_ph)
                        self.prefer_assign_rank_var_embedded = get_self_or_expand_dims(
                            self.prefer_assign_rank_var_ph)

                    with tf.name_scope("Register"):
                        self.register_diff_embedded = get_self_or_expand_dims(self.register_diff_ph)

                    with tf.name_scope("HomeworkCount"):
                        self.homework_count_embedded = get_self_or_expand_dims(self.homework_count_ph)

                    with tf.name_scope("WeekHomeworkCount"):
                        self.week_count_embedded = get_self_or_expand_dims(self.week_count_ph)

                    with tf.name_scope("Lastday"):
                        self.lastday_count_embedded = get_self_or_expand_dims(self.lastday_count_ph)

                # TODO: homework and reflect and style
                # with tf.name_scope("Style"):
                #     for fir in ["1", "2", "3", "4"]:
                #         for sec in ["100", "010", "001", "110", "101", "011", "111"]:

                #             key = "style_" + fir + "0" + sec + "_ph"
                #             embed_key = "style_" + fir + "0" + sec + "_embedded"
                #             setattr(self, embed_key,
                #                     get_self_or_expand_dims(getattr(self, key)))
                
                # homework
                with tf.name_scope("Homework"):
                    self.style_embeddings_var = tf.get_variable("style_embeddings_var",
                                                                [N_STYLE, EMBEDDING_DIM])
                    self.chapters_embeddings_var = tf.get_variable("chapters_embeddings_var",
                                                                   [N_CHAPTER, EMBEDDING_DIM])
                    self.sections_embeddings_var = tf.get_variable("sections_embeddings_var",
                                                                   [N_SECTION, EMBEDDING_DIM])
                    # tf.summary.histogram('homework_embeddings_var', self.homework_embeddings_var)
                    
                    self.today_chapters_embedded = get_mask_zero_embedded(self.chapters_embeddings_var,
                                                                          self.today_chapters_ph)
                    self.today_sections_embedded = get_mask_zero_embedded(self.sections_embeddings_var,
                                                                          self.today_sections_ph)

                    self.history_chap_embedded = get_history_sum_embedded(self)

                    self.today_style_embedded = tf.nn.embedding_lookup(self.style_embeddings_var,
                                                                       self.today_style_ph)
                    self.today_cha_rnn = get_rnn_sum(self.today_chapters_embedded, "rnncha")
                    self.today_sec_rnn = get_rnn_sum(self.today_sections_embedded, "rnnsec")
                # reflect
                with tf.name_scope("Reflect"):
                    self.reflect_embeddings_var = tf.get_variable("reflect_embeddings_var",
                                                                  [N_REFLECT, EMBEDDING_DIM])
                    # tf.summary.histogram('reflect_embeddings_var', self.reflect_embeddings_var)
                    self.reflect_value_embedded = get_mask_zero_embedded(self.reflect_embeddings_var,
                                                                         self.reflect_value_ph)
                    self.ref_rnn = get_rnn_sum(self.reflect_value_embedded, "ref")
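
# Hedged sketches of two helpers this listing calls but never defines; their
# behavior is inferred from the call sites (scalar dense features vs.
# zero-padded id sequences), so treat these as assumptions.
def get_self_or_expand_dims(x):
    # give a [batch] scalar feature a trailing axis so it can be
    # concatenated with embedded features
    return tf.expand_dims(tf.cast(x, tf.float32), axis=-1)    # [B] -> [B, 1]

def get_mask_zero_embedded(emb_var, ids):
    # embedding lookup that zeroes out the vectors of padding id 0
    emb = tf.nn.embedding_lookup(emb_var, ids)                 # [B, T, D]
    mask = tf.cast(tf.not_equal(ids, 0), tf.float32)           # [B, T]
    return emb * tf.expand_dims(mask, -1)
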
Example #9
    def __init__(self, dim, samples
                 , init_w= tf.random_uniform_initializer(minval= -0.01, maxval= 0.01)
                 , ftype= tf.float32, scope= 'dbm'):
        self.dim, self.ftype = dim, ftype
        # todo pretraining
        with tf.variable_scope(scope):
            self.rbm = tuple(
                Rbm(scope= "rbm{}".format(i)
                    , dim_v= dim_v
                    , dim_h= dim_h
                    , samples= samples
                    , init_w= init_w
                    , ftype= self.ftype)
                for i, (dim_v, dim_h) in enumerate(zip(dim, dim[1:]), 1))
            self.w = tuple(rbm.w for rbm in self.rbm)
            # positive stage: variational inference
            self.m = tuple(rbm.h for rbm in self.rbm)
            self.v_ = self.rbm[0].v_
            self.k_meanf_ = tf.placeholder(name= 'k_meanf_', dtype= tf.int32, shape= ())

            def meanf(m):
                mf, ml = [], self.v_
                for wl, wr, mr in zip(self.w, self.w[1:], m[1:]):
                    mf.append(tf.sigmoid(tf.matmul(ml, wl) + tf.matmul(mr, wr, transpose_b= True)))
                    ml = mf[-1]
                mf.append(tf.sigmoid(tf.matmul(ml, wr)))
                return tuple(mf)

            with tf.name_scope('meanf'):
                self.meanf = tuple(
                    tf.assign(m, mf, validate_shape= False) for m, mf in zip(
                        self.m, tf.while_loop(
                            loop_vars= (self.k_meanf_, self.m)
                            , cond= lambda k, _: (0 < k)
                            , body= lambda k, m: (k - 1, meanf(m)))[1]))

            with tf.name_scope('pos'):
                bs = tf.cast(tf.shape(self.v_)[0], dtype= self.ftype)
                vm = (self.v_,) + self.meanf
                self.pos = tuple((tf.matmul(ml, mr, transpose_a= True) / bs) for ml, mr in zip(vm, vm[1:]))
            # negative stage: stochastic approximation
            self.x = tuple(rbm.v for rbm in self.rbm)
            self.x += (binary_variable(name= 'x', shape= (samples, self.dim[-1]), dtype= self.ftype),)
            self.v = self.x[0]
            self.k_gibbs_ = tf.placeholder(name= 'k_gibbs_', dtype= tf.int32, shape= ())

            def gibbs(x):
                x = list(x)
                # update odd layers
                for i, (xl, xr, wl, wr) in enumerate(zip(x[::2], x[2::2], self.w, self.w[1:])):
                    x[1+(2*i)] = binary(tf.matmul(xl, wl) + tf.matmul(xr, wr, transpose_b= True))
                # update first layer
                x[0] = binary(tf.matmul(x[1], self.w[0], transpose_b= True))
                # update even layers
                for i, (xl, xr, wl, wr) in enumerate(zip(x[1::2], x[3::2], self.w[1:], self.w[2:])):
                    x[2+(2*i)] = binary(tf.matmul(xl, wl) + tf.matmul(xr, wr, transpose_b= True))
                # update last layer
                x[-1] = binary(tf.matmul(x[-2], self.w[-1]))
                return tuple(x)

            with tf.name_scope('gibbs'):
                x = self.gibbs = tuple(
                    tf.assign(x, xg, validate_shape= False) for x, xg in zip(
                        self.x, tf.while_loop(
                            loop_vars= (self.k_gibbs_, self.x)
                            , cond= lambda k, x: (0 < k)
                            , body= lambda k, x: (k - 1, gibbs(x)))[1]))

            with tf.name_scope('neg'):
                bs = tf.cast(tf.shape(self.v)[0], dtype= self.ftype)
                self.neg = tuple((tf.matmul(xl, xr, transpose_a= True) / bs) for xl, xr in zip(x, x[1:]))
            # parameter update
            self.lr_ = tf.placeholder(name= 'lr_', dtype= self.ftype, shape= ())
            with tf.name_scope('up'):
                self.up = tuple(
                    w.assign_add((pos - neg) * self.lr_).op
                    for w, pos, neg in zip(self.w, self.pos, self.neg))
            self.step = 0
def get_history_bgru_embedded(self):
    # TODO: add mask info for this operation
    his_days = [
        'one', 'two', 'three', 'four', 'five', 'six', 'seven', 'eight', 'nine',
        'ten', 'eleven', 'twelve', 'thirteen', 'fourteen'
    ]

    for fir in his_days:
        key = "history_" + fir + "_chap_ph"
        embed_key = "history_" + fir + "_chap_embedded"
        setattr(
            self, embed_key,
            get_mask_zero_embedded(self.chapters_embeddings_var,
                                   getattr(self, key)))
    for fir in his_days:
        key = "history_" + fir + "_sec_ph"
        embed_key = "history_" + fir + "_sec_embedded"
        setattr(
            self, embed_key,
            get_mask_zero_embedded(self.sections_embeddings_var,
                                   getattr(self, key)))

    history_cha_embedded = []
    history_sec_embedded = []
    for fir in his_days[::-1]:
        key_c = "history_" + fir + "_chap_embedded"

        key_s = "history_" + fir + "_sec_embedded"
        # shapes: each day's [B, T, 128] pools to [B, 128]; stacking 14 days
        # gives [14, B, 128], transposed below to [B, 14, 128]
        history_cha_embedded.append(get_rnn_sum(getattr(self, key_c),
                                                "rnncha"))
        history_sec_embedded.append(get_rnn_sum(getattr(self, key_s),
                                                "rnnsec"))

    history_cha_emb = tf.transpose(history_cha_embedded, [1, 0, 2])
    history_sec_emb = tf.transpose(history_sec_embedded, [1, 0, 2])

    with tf.name_scope("GRU"):
        HIDDEN_DIM = 128
        KEEP_PROB = 0.8
        with tf.name_scope('cell'):

            def build_cell(n, m):
                cell = tf.nn.rnn_cell.GRUCell(n)
                cell = tf.nn.rnn_cell.DropoutWrapper(cell, output_keep_prob=m)
                return cell

            # NB: the second layer is full-width here, so the concatenated
            # forward/backward output depth is 2 * HIDDEN_DIM before the sum-pool
            num_units = [HIDDEN_DIM // 2, HIDDEN_DIM]

            cell_fw = [build_cell(n, KEEP_PROB) for n in num_units]
            cell_bw = [build_cell(n, KEEP_PROB) for n in num_units]

        with tf.name_scope('gru'):
            biout, output_fw, output_bw = tf.contrib.rnn.stack_bidirectional_dynamic_rnn(
                cell_fw,
                cell_bw,
                inputs=tf.concat([history_cha_emb, history_sec_emb], axis=-1),
                dtype=tf.float32,
                scope='cha')
            rnnoutput = tf.reduce_sum(biout, axis=-2)

    return rnnoutput
Example #11
def get_history_gru_embedded(self):
    # TODO: add mask info for this operation
    his_days = [
        'one', 'two', 'three', 'four', 'five', 'six', 'seven', 'eight', 'nine',
        'ten', 'eleven', 'twelve', 'thirteen', 'fourteen'
    ]

    for fir in his_days:
        key = "history_" + fir + "_chap_ph"
        embed_key = "history_" + fir + "_chap_embedded"
        setattr(
            self, embed_key,
            get_mask_zero_embedded(self.chapters_embeddings_var,
                                   getattr(self, key)))
    for fir in his_days:
        key = "history_" + fir + "_sec_ph"
        embed_key = "history_" + fir + "_sec_embedded"
        setattr(
            self, embed_key,
            get_mask_zero_embedded(self.sections_embeddings_var,
                                   getattr(self, key)))

    # chap: 14 days of zero-padded id lists, e.g. [1,2] [3] [4,3,5] [0,0] ...
    # sec: likewise
    # options: run one RNN on cha+sec concatenated, or run separate cha and
    # sec RNNs and concatenate (or not) afterwards
    #
    history_cha_embedded = []
    history_sec_embedded = []
    for fir in his_days[::-1]:
        key_c = "history_" + fir + "_chap_embedded"

        key_s = "history_" + fir + "_sec_embedded"
        # shapes: each day's [B, T, 128] means to [B, 128]; stacking 14 days
        # gives [14, B, 128], transposed below to [B, 14, 128]
        history_cha_embedded.append(
            tf.reduce_mean(getattr(self, key_c), axis=-2))
        history_sec_embedded.append(
            tf.reduce_mean(getattr(self, key_s), axis=-2))

    history_cha_emb = tf.transpose(history_cha_embedded, [1, 0, 2])
    history_sec_emb = tf.transpose(history_sec_embedded, [1, 0, 2])

    with tf.name_scope("GRU"):
        num_layers = 2
        HIDDEN_DIM = 128
        KEEP_PROB = 0.8

        with tf.name_scope('gru1'):

            def get_cell():
                cell2 = tf.nn.rnn_cell.GRUCell(HIDDEN_DIM)
                cell2_ = tf.nn.rnn_cell.DropoutWrapper(
                    cell2, output_keep_prob=KEEP_PROB)
                return cell2_

            cells = [get_cell() for _ in range(num_layers)]
            Cell = tf.nn.rnn_cell.MultiRNNCell(cells, state_is_tuple=True)
            rnnoutput, _ = tf.nn.dynamic_rnn(cell=Cell,
                                             inputs=history_cha_emb,
                                             dtype=tf.float32,
                                             scope='cha')
            rnnoutput1 = tf.reduce_sum(rnnoutput, axis=-2)

    with tf.name_scope("GRU2"):
        num_layers = 2
        HIDDEN_DIM = 128
        KEEP_PROB = 0.8

        with tf.name_scope('gru2'):
            cells2 = []
            for _ in range(num_layers):
                cell = tf.nn.rnn_cell.GRUCell(HIDDEN_DIM)
                cell_ = tf.nn.rnn_cell.DropoutWrapper(
                    cell, output_keep_prob=KEEP_PROB)
                cells2.append(cell_)
            #cells2 = [get_cell2() for _ in range(num_layers)]
            Cell2 = tf.nn.rnn_cell.MultiRNNCell(cells2, state_is_tuple=True)
            rnnoutput2, _ = tf.nn.dynamic_rnn(cell=Cell2,
                                              inputs=history_sec_emb,
                                              dtype=tf.float32,
                                              scope='SEC')

            rnnoutput3 = tf.reduce_sum(rnnoutput2, axis=-2)

    #gru_out = tf.concat([rnnoutput1,rnnoutput3],axis=-1)

    return rnnoutput1, rnnoutput3
Exemple #12
0
def get_history_din_embedded(self):
    # TODO: add mask info for this operation
    his_days = [
        'one', 'two', 'three', 'four', 'five', 'six', 'seven', 'eight', 'nine',
        'ten', 'eleven', 'twelve', 'thirteen', 'fourteen'
    ]

    for fir in his_days:
        key = "history_" + fir + "_chap_ph"
        embed_key = "history_" + fir + "_chap_embedded"
        setattr(
            self, embed_key,
            get_mask_zero_embedded(self.chapters_embeddings_var,
                                   getattr(self, key)))
    for fir in his_days:
        key = "history_" + fir + "_sec_ph"
        embed_key = "history_" + fir + "_sec_embedded"
        setattr(
            self, embed_key,
            get_mask_zero_embedded(self.sections_embeddings_var,
                                   getattr(self, key)))

    history_cha_embedded = []
    history_sec_embedded = []
    for fir in his_days[::-1]:
        key_c = "history_" + fir + "_chap_embedded"

        key_s = "history_" + fir + "_sec_embedded"

        history_cha_embedded.append(get_rnn_sum(getattr(self, key_c),
                                                "rnncha"))
        history_sec_embedded.append(get_rnn_sum(getattr(self, key_s),
                                                "rnnsec"))

    # stack of per-day pooled embeddings: [T, B, N] -> [B, T, N]
    history_cha_emb = tf.transpose(history_cha_embedded, [1, 0, 2])
    history_sec_emb = tf.transpose(history_sec_embedded, [1, 0, 2])

    # DIEN-style interest extraction (GRU) and interest evolution (attentional GRU)
    with tf.name_scope('rnn_1'):
        rnn_outputs, _ = dynamic_rnn(GRUCell(HIDDEN_SIZE * 2),
                                     inputs=tf.concat(
                                         [history_cha_emb, history_sec_emb],
                                         axis=-1),
                                     sequence_length=self.history_len_ph,
                                     dtype=tf.float32,
                                     scope="gru1")
    with tf.name_scope('Attention_layer_1'):
        att_outputs, alphas = din_fcn_attention(
            tf.concat([
                get_rnn_sum(self.today_chapters_embedded, "rnncha"),
                get_rnn_sum(self.today_sections_embedded, "rnnsec")
            ], axis=-1),
            rnn_outputs,
            ATTENTION_SIZE,
            self.history_mask_cha_ph,
            scope="1_1",
            softmax_stag=1,
            stag='1_1',
            mode='LIST',
            return_alphas=True)
    with tf.name_scope('rnn_2'):
        rnn_outputs2, final_state2 = dynamic_rnn(
            VecAttGRUCell(HIDDEN_SIZE * 2),
            inputs=rnn_outputs,
            att_scores=tf.expand_dims(alphas, -1),
            sequence_length=self.history_len_ph,
            dtype=tf.float32,
            scope="gru2")

    return final_state2
Example #13
 def __init__(self, dat, dim_rec, dim_z, dim_gen, scope='vae'):
     assert 2 == dat.ndim
     assert isinstance(dim_rec, tuple)
     assert isinstance(dim_z, int)
     assert isinstance(dim_gen, tuple)
     init_w = tf.variance_scaling_initializer(scale=2.0,
                                              mode='fan_in',
                                              distribution='uniform')
     init_b = tf.constant_initializer(0.01)
     init_z = tf.zeros_initializer()
     with tf.variable_scope(scope):
         dat = self.dat = tf.constant(name='dat', value=dat)
         bs_ = self.bs_ = tf.placeholder(name='bs_',
                                         dtype=tf.int32,
                                         shape=())
         bat = self.bat = tf.random_uniform(name='bat',
                                            shape=(bs_, ),
                                            minval=0,
                                            maxval=dat.shape[0],
                                            dtype=tf.int32)
         h = x = self.x = tf.nn.embedding_lookup(name='x',
                                                 params=dat,
                                                 ids=bat)
         for i, dim in enumerate(dim_rec, 1):
             name = "hr{}".format(i)
             h = tf.layers.dense(name=name,
                                 inputs=h,
                                 units=dim,
                                 activation=tf.nn.relu,
                                 kernel_initializer=init_w,
                                 bias_initializer=init_b)
             setattr(self, name, h)
         mu = self.mu = tf.layers.dense(name='mu',
                                        inputs=h,
                                        units=dim_z,
                                        kernel_initializer=init_w,
                                        bias_initializer=init_z)
         lv = self.lv = tf.layers.dense(name='lv',
                                        inputs=h,
                                        units=dim_z,
                                        kernel_initializer=init_w,
                                        bias_initializer=init_z)
         with tf.name_scope('z'):
             h = z = self.z = mu + tf.exp(
                 0.5 * lv) * tf.random_normal(shape=tf.shape(lv))
         for i, dim in enumerate(dim_gen, 1):
             name = "hg{}".format(i)
             h = tf.layers.dense(name=name,
                                 inputs=h,
                                 units=dim,
                                 activation=tf.nn.relu,
                                 kernel_initializer=init_w,
                                 bias_initializer=init_b)
             setattr(self, name, h)
         logits = tf.layers.dense(
             name='logits',
             inputs=h,
             units=dat.shape[1],
             # activation=tf.nn.sigmoid,
             kernel_initializer=init_w,
             bias_initializer=init_z)
         g = self.g = tf.sigmoid(logits)
         with tf.name_scope('loss_recons'):
             # loss_recons = self.loss_recons = tf.reduce_mean(
             #     tf.reduce_sum(tf.nn.sigmoid_cross_entropy_with_logits(labels= x, logits= logits), axis= 1))
             loss_recons = self.loss_recons = tf.reduce_mean(
                 tf.reduce_sum(tf.square(x - g), axis=1))
         with tf.name_scope('loss_relent'):
             # loss_relent = self.loss_relent = tf.reduce_mean(
             #     0.5 * tf.reduce_sum((- 1.0 - lv + tf.exp(lv) + tf.square(mu)), axis= 1))
             loss_relent = self.loss_relent = tf.reduce_mean(
                 tf.reduce_sum((-1.0 - lv + tf.exp(lv) + tf.square(mu)),
                               axis=1))
         with tf.name_scope('loss'):
             loss = self.loss = loss_relent + loss_recons
         up = self.up = tf.train.AdamOptimizer().minimize(loss)
         self.step = 0
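
# Hedged training sketch for the VAE above (assumptions: the class is named
# Vae, and `data` is a float array of shape (n_examples, n_features) in [0, 1];
# each step samples its own minibatch indices inside the graph via `bat`).
vae = Vae(data, dim_rec=(512, 128), dim_z=32, dim_gen=(128, 512))
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for _ in range(1000):
        sess.run(vae.up, {vae.bs_: 64})
        vae.step += 1
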
    def __init__(self, *, use_dice=False):
        self.graph = tf.Graph()
        self.tensor_info = {}
        self.use_dice = use_dice

        with self.graph.as_default():
            # Main Inputs
            with tf.name_scope('Main_Inputs'):

                self.target_ph = tf.placeholder(tf.float32, [None, None], name='target_ph')
                self.lr_ph = tf.placeholder(tf.float32, [], name="lr_ph")

                # with tf.name_scope("Teacher_Info"):
                self.teacher_id_ph = tf.placeholder(tf.int32, [None, ], name="teacher_id_ph")
                self.student_count_ph = tf.placeholder(tf.int32, [None, ], name="student_count_ph")
                self.province_id_ph = tf.placeholder(tf.int32, shape=[None, ], name="province_id_ph")
                self.city_id_ph = tf.placeholder(tf.int32, shape=[None, ], name="city_id_ph")
                # TODO: binary
                self.core_type_ph = tf.placeholder(tf.int32, shape=[None, ], name="core_type_ph")

                # with tf.name_scope("Class_Info"):
                self.class_id_ph = tf.placeholder(tf.int32, [None, ], name="class_id_ph")
         
                self.edition_id_ph = tf.placeholder(tf.int32, [None, ], name="edition_id_ph")
                self.grade_id_ph = tf.placeholder(tf.int32, [None, ], name="grade_id_ph")

                self.class_student_ph = tf.placeholder(tf.int32, [None, ], name="class_student_ph")

                self.cap_avg_ph = tf.placeholder(tf.float32, [None, ], name="cap_avg_ph")
                self.cap_max_ph = tf.placeholder(tf.float32, [None, ], name="cap_max_ph")
                self.cap_min_ph = tf.placeholder(tf.float32, [None, ], name="cap_min_ph")

                # with tf.name_scope("Homework_Info"):
                self.today_chapters_ph = tf.placeholder(tf.int32, [None, None], name="today_chapters_ph")
                self.today_sections_ph = tf.placeholder(tf.int32, [None, None], name="today_sections_ph")                
              
                self.today_chap_mask_ph = tf.placeholder(tf.float32, [None, None], name='today_chap_mask_ph')
                self.today_chap_len_ph = tf.placeholder(tf.int32, [None, ], name='today_chap_len_ph')
                self.today_sec_mask_ph = tf.placeholder(tf.float32, [None, None], name='today_sec_mask_ph')
                self.today_sec_len_ph = tf.placeholder(tf.int32, [None, ], name='today_sec_len_ph')
      
                self.today_style_ph = tf.placeholder(tf.int32, [None, ], name='today_style_ph')

                # TODO: use three dims to capture more history info
    
                for fir in ['one', 'two', 'three', 'four','five','six','seven','eight','nine','ten','eleven','twelve','thirteen','fourteen']:
                    key = "history_" + fir + "_chap_ph"
                    setattr(self, key,
                                tf.placeholder(tf.int32, [None, None], name=key))
                for fir in ['one', 'two', 'three', 'four','five','six','seven','eight','nine','ten','eleven','twelve','thirteen','fourteen']:
                    key = "history_" + fir + "_sec_ph"
                    setattr(self, key,
                                tf.placeholder(tf.int32, [None, None], name=key))
                # TODO: All belows should consider the type and input
                # with tf.name_scope("Study_Info"):
                # TODO: study_vector_ph's type can change?
            
                self.study_vector_ph = tf.placeholder(tf.float32, [None, 20], name="study_vector_ph")
                self.gap_days_ph = tf.placeholder(tf.int32, [None, ], name="gap_days_ph")

              
                self.month_submit_rate_ph = tf.placeholder(tf.float32, [None, ], name="month_submit_rate_ph")

                # with tf.name_scope("Capacity_Info"): 
                self.region_capacity_ph = tf.placeholder(tf.float32, [None, ], name="region_capacity_ph")

                # with tf.name_scope("Prefer_Info"):         
                self.prefer_assign_time_avg_ph = tf.placeholder(tf.float32, [None, ],
                                                                name="prefer_assign_time_avg_ph")
                self.prefer_assign_time_var_ph = tf.placeholder(tf.float32, [None, ],
                                                                name="prefer_assign_time_var_ph")
                self.prefer_assign_rank_avg_ph = tf.placeholder(tf.float32, [None, ],
                                                                name="prefer_assign_rank_avg_ph")
                self.prefer_assign_rank_var_ph = tf.placeholder(tf.float32, [None, ],
                                                                name="prefer_assign_rank_var_ph")

                # with tf.name_scope("Register_Info"):  
                self.register_diff_ph = tf.placeholder(tf.int32, [None, ], name="register_diff_ph")

                # with tf.name_scope("HomeworkCount_Info"): 
                self.homework_count_ph = tf.placeholder(tf.int32, [None, ], name="homework_count_ph")

       
                for fir in ["1", "2", "3", "4"]:
                    for sec in ["100", "010", "001", "110", "101", "011", "111"]:
                        key = "style_" + fir + "0" + sec + "_ph"
                        setattr(self, key,
                                tf.placeholder(tf.int32, [None, ], name=key))

                # with tf.name_scope("WeekHomeworkCount_Info"):
 
                self.week_count_ph = tf.placeholder(tf.float32, [None, ], name="week_count_ph")

                # with tf.name_scope("Reflect_Info"):
                # TODO: explore a more graceful mapping
                self.reflect_value_ph = tf.placeholder(tf.int32, [None, None], name="reflect_value_ph")
                self.reflect_mask_ph = tf.placeholder(tf.float32, [None, None], name="reflect_mask_ph")
                self.reflect_len_ph = tf.placeholder(tf.int32, [None, ], name="reflect_len_ph")

                # with tf.name_scope("Lastdat_Info"): 
                self.lastday_count_ph = tf.placeholder(tf.int32, [None, ], name="lastday_count_ph")

            # Embedding layer
            with tf.name_scope('Main_Embedding_layer'):
                # almost done
                with tf.name_scope("Others"):
                    # teacher
                    with tf.name_scope("Teacher"):
                        self.teacher_id_embeddings_var = tf.get_variable("teacher_id_embeddings_var",
                                                                         [N_TEACHER, EMBEDDING_DIM], )
                        # tf.summary.histogram('teacher_id_embeddings_var', self.teacher_id_embeddings_var)
                        self.teacher_id_embedded = tf.nn.embedding_lookup(self.teacher_id_embeddings_var,
                                                                          self.teacher_id_ph, )

                        self.province_id_embeddings_var = tf.get_variable("province_id_embeddings_var",
                                                                          [N_PROVINCE, EMBEDDING_DIM])
                        # tf.summary.histogram('province_id_embeddings_var', self.province_id_embeddings_var)
                        self.province_id_embedded = tf.nn.embedding_lookup(self.province_id_embeddings_var,
                                                                           self.province_id_ph)

                        self.city_id_embeddings_var = tf.get_variable("city_id_embeddings_var",
                                                                      [N_CITY, EMBEDDING_DIM])
                        # tf.summary.histogram('city_id_embeddings_var', self.city_id_embeddings_var)
                        self.city_id_embedded = tf.nn.embedding_lookup(self.city_id_embeddings_var,
                                                                       self.city_id_ph)

                        self.core_type_embeddings_var = tf.get_variable("core_type_embeddings_var",
                                                                        [2, EMBEDDING_DIM])
                        # tf.summary.histogram('core_type_embeddings_var', self.core_type_embeddings_var)
                        self.core_type_embedded = tf.nn.embedding_lookup(self.core_type_embeddings_var,
                                                                         self.core_type_ph)
                        # Dense scalar feature, fed in directly; the *_embedded alias just
                        # expands dims (tf.identity would also work).
                        self.student_count_embedded = get_self_or_expand_dims(self.student_count_ph)
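                        # Editor sketch: get_self_or_expand_dims is assumed to lift a [batch]
                        # scalar feature to [batch, 1] so it can be concatenated with the
                        # embedding outputs, roughly:
                        #
                        #     def get_self_or_expand_dims(t):
                        #         t = tf.cast(t, tf.float32)
                        #         return tf.expand_dims(t, -1) if t.shape.ndims == 1 else t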

                    with tf.name_scope("Class"):
                        self.class_id_embeddings_var = tf.get_variable("class_id_embeddings_var",
                                                                       [N_CLASS, EMBEDDING_DIM])
                        # tf.summary.histogram('class_id_embeddings_var', self.class_id_embeddings_var)
                        self.class_id_embedded = tf.nn.embedding_lookup(self.class_id_embeddings_var,
                                                                        self.class_id_ph)

                        self.edition_id_embeddings_var = tf.get_variable("edition_id_embeddings_var",
                                                                         [N_EDITION, EMBEDDING_DIM])
                        # tf.summary.histogram('edition_id_embeddings_var', self.edition_id_embeddings_var)
                        self.edition_id_embedded = tf.nn.embedding_lookup(self.edition_id_embeddings_var,
                                                                          self.edition_id_ph)

                        self.grade_id_embeddings_var = tf.get_variable("grade_id_embeddings_var",
                                                                       [N_GRADE, EMBEDDING_DIM])
                        # tf.summary.histogram('grade_id_embeddings_var', self.grade_id_embeddings_var)
                        self.grade_id_embedded = tf.nn.embedding_lookup(self.grade_id_embeddings_var,
                                                                        self.grade_id_ph)
                        # Dense scalar features, fed in directly; the *_embedded alias just
                        # expands dims (tf.identity would also work).
                        # Continuous values are meaningful as-is, so no embedding lookup is needed.
                        self.class_student_embedded = get_self_or_expand_dims(self.class_student_ph)
                        self.cap_avg_embedded = get_self_or_expand_dims(self.cap_avg_ph)
                        self.cap_max_embedded = get_self_or_expand_dims(self.cap_max_ph)
                        self.cap_min_embedded = get_self_or_expand_dims(self.cap_min_ph)

                    with tf.name_scope("Study"):
                        # dense features, fed in directly; study_vector_ph is used as-is
                        self.study_vector_embedded = self.study_vector_ph
                        self.gap_days_embedded = get_self_or_expand_dims(self.gap_days_ph)

                    with tf.name_scope("Submit"):
                        # dense scalar feature, fed in directly
                        self.month_submit_rate_embedded = get_self_or_expand_dims(self.month_submit_rate_ph)

                    with tf.name_scope("Capacity"):
                        # dense scalar feature, fed in directly
                        self.region_capacity_embedded = get_self_or_expand_dims(self.region_capacity_ph)

                    with tf.name_scope("Prefer"):
                        # dense scalar features, fed in directly
                        self.prefer_assign_time_avg_embedded = get_self_or_expand_dims(
                            self.prefer_assign_time_avg_ph)
                        self.prefer_assign_time_var_embedded = get_self_or_expand_dims(
                            self.prefer_assign_time_var_ph)
                        self.prefer_assign_rank_avg_embedded = get_self_or_expand_dims(
                            self.prefer_assign_rank_avg_ph)
                        self.prefer_assign_rank_var_embedded = get_self_or_expand_dims(
                            self.prefer_assign_rank_var_ph)

                    with tf.name_scope("Register"):
                        self.register_diff_embedded = get_self_or_expand_dims(self.register_diff_ph)

                    with tf.name_scope("HomeworkCount"):
                        self.homework_count_embedded = get_self_or_expand_dims(self.homework_count_ph)

                    with tf.name_scope("WeekHomeworkCount"):
                        self.week_count_embedded = get_self_or_expand_dims(self.week_count_ph)

                    with tf.name_scope("Lastday"):
                        self.lastday_count_embedded = get_self_or_expand_dims(self.lastday_count_ph)

                # TODO: homework, reflect, and style features
                with tf.name_scope("Style"):
                    for fir in ["1", "2", "3", "4"]:
                        for sec in ["100", "010", "001", "110", "101", "011", "111"]:
                            key = "style_" + fir + "0" + sec + "_ph"
                            embed_key = "style_" + fir + "0" + sec + "_embedded"
                            setattr(self, embed_key,
                                    get_self_or_expand_dims(getattr(self, key)))
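                    # Editor note: mirrors the style placeholder loop above, giving each
                    # style_*_ph a matching style_*_embedded tensor via the same helper.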

                # homework
                with tf.name_scope("Homework"):
                    self.style_embeddings_var = tf.get_variable("style_embeddings_var",
                                                                [N_STYLE, EMBEDDING_DIM])
                    self.chapters_embeddings_var = tf.get_variable("chapters_embeddings_var",
                                                                   [N_CHAPTER, EMBEDDING_DIM])
                    self.sections_embeddings_var = tf.get_variable("sections_embeddings_var",
                                                                   [N_SECTION, EMBEDDING_DIM])
                    # tf.summary.histogram('chapters_embeddings_var', self.chapters_embeddings_var)
                    self.today_chapters_embedded = get_mask_zero_embedded(self.chapters_embeddings_var,
                                                                          self.today_chapters_ph)
                    self.today_sections_embedded = get_mask_zero_embedded(self.sections_embeddings_var,
                                                                          self.today_sections_ph)
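                    # Editor sketch: get_mask_zero_embedded is assumed to be a masked lookup
                    # that zeroes the vectors of padding id 0, roughly:
                    #
                    #     def get_mask_zero_embedded(var, ids):
                    #         emb = tf.nn.embedding_lookup(var, ids)            # [B, T, D]
                    #         mask = tf.cast(tf.not_equal(ids, 0), tf.float32)  # [B, T]
                    #         return emb * tf.expand_dims(mask, -1)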
                    self.history_chap_embedded, self.history_sec_embedded = get_history_bgru_embedded(self)
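                    # Editor note (assumption): get_history_bgru_embedded is taken to encode
                    # the padded history chapter/section id sequences with the stacked
                    # bidirectional GRU defined in this file and return one fixed-size
                    # summary vector per example for each sequence.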
                    self.today_style_embedded = tf.nn.embedding_lookup(self.style_embeddings_var,
                                                                       self.today_style_ph)
                # reflect
                with tf.name_scope("Reflect"):
                    self.reflect_embeddings_var = tf.get_variable("reflect_embeddings_var",
                                                                  [N_REFLECT, EMBEDDING_DIM])
                    # tf.summary.histogram('reflect_embeddings_var', self.reflect_embeddings_var)
                    self.reflect_value_embedded = get_mask_zero_embedded(self.reflect_embeddings_var,
                                                                         self.reflect_value_ph)
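                    # Assuming the same masked-lookup behavior sketched in the Homework
                    # scope: rows with padding id 0 come out as all-zero vectors, with
                    # reflect_mask_ph presumably used for pooling downstream.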