예제 #1
0
def run():
    """Run the UPCC prediction experiment over every round and sparsity.

    Loads the raw (user, service, time, value) record table, packs it into
    a dense 3-D tensor, then for each (round, sparsity) combination splits
    the data, predicts each time slice with UPCC, evaluates, and appends a
    formatted report to the result file.

    Relies on module-level names not visible in this block: load_origin_data,
    dbug_paht, epon, spas, spliter, upcc, mae, evel, fwrite, result.
    """
    print('加载数据开始')
    now = time.time()
    rec_table = load_origin_data(dbug_paht)
    recNum = len(rec_table)
    print('原始数据:\n', rec_table)
    print('总记录数:%d' % (recNum))
    # BUG FIX: the original ended this line with a backslash continuation,
    # gluing the next assignment onto the same logical line -> SyntaxError.
    # Shape is fixed by the dataset: 142 users x 4500 services x 64 slices
    # -- TODO: consider parameterizing these constants.
    R = np.zeros([142, 4500, 64])
    u = rec_table[:, 0].astype(int)
    s = rec_table[:, 1].astype(int)
    t = rec_table[:, 2].astype(int)
    # Scatter the observed values into the dense tensor via fancy indexing.
    R[u, s, t] = rec_table[:, 3]
    print('加载数据完成,耗时 %.2f秒\n' % ((time.time() - now)))

    for rid in range(epon):
        allret = {}
        for spa in spas:
            print('开始 round-%d,spa-%.1f' % (rid, spa))

            print('分割数据集开始')
            now = time.time()
            train, test = spliter(R, spa, rid)
            print('分割数据集结束,耗时 %.2f秒\n' % ((time.time() - now)))

            print('预测开始')
            now = time.time()
            predict = np.zeros_like(train)
            # 'ts' (not 't') so the index array built above is not shadowed.
            for ts in range(64):
                print('时间片%d预测开始' % ts)
                predict[:, :, ts] = upcc(train[:, :, ts], 10)
                print(mae(test[:, :, ts], predict[:, :, ts]))
            print('预测结束,耗时 %.2f秒\n' % ((time.time() - now)))

            print('评测开始')
            now = time.time()
            ret = evel(test, predict)
            allret[spa] = ret
            print('评测结束,耗时 %.2f秒\n' % ((time.time() - now)))

            print(ret)

        for spa in allret:
            retstr = '==================================\n'
            retstr += 'round-%d spa-%.2f\n' % (rid, spa)
            rec = allret[spa]
            retstr += 'all - mae \t%f\n' % (rec[0])
            for i in range(1, len(rec)):
                retstr += 't-%d mae \t%f\n' % (i, rec[i])
            retstr += 'localtime \t%s\n' % (time.asctime())
            retstr += '==================================\n\n'
            print(retstr)
            fwrite.fwrite_append(result, retstr)
예제 #2
0
def run():
    """Baseline prediction experiment.

    Loads the record table into a dense (user, service, time) tensor, then
    for every (round, sparsity) pair: split, predict with baseline_predict,
    evaluate, and append a formatted report to the result file.

    Uses module-level names not visible in this block: load_origin_data,
    dbug_paht, epon, spas, spliter, baseline_predict, evel, fwrite, result.
    """
    print('加载数据开始')
    t0 = time.time()
    table = load_origin_data(dbug_paht)
    print('原始数据:\n', table)
    print('总记录数:%d' % (len(table),))
    # Dense tensor: 142 users x 4500 services x 64 time slices.
    R = np.zeros([142, 4500, 64])
    users = table[:, 0].astype(int)
    servs = table[:, 1].astype(int)
    slots = table[:, 2].astype(int)
    R[users, servs, slots] = table[:, 3]
    print('加载数据完成,耗时 %.2f秒\n' % (time.time() - t0))
    for rid in range(epon):
        results_by_spa = {}
        for spa in spas:
            print('开始 round-%d,spa-%.1f' % (rid, spa))

            print('分割数据集开始')
            t0 = time.time()
            train, test = spliter(R, spa, rid)
            print('分割数据集结束,耗时 %.2f秒\n' % (time.time() - t0))

            print('预测开始')
            t0 = time.time()
            predict = baseline_predict(train)
            print('预测结束,耗时 %.2f秒\n' % (time.time() - t0))

            print('评测开始')
            t0 = time.time()
            ret = evel(test, predict)
            results_by_spa[spa] = ret
            print('评测结束,耗时 %.2f秒\n' % (time.time() - t0))

            print(ret)

        # Emit one report block per sparsity (dict preserves insertion order).
        for spa, rec in results_by_spa.items():
            pieces = ['==================================\n',
                      'round-%d spa-%.2f\n' % (rid, spa),
                      'all - mae \t%f\n' % (rec[0],)]
            pieces.extend('t-%d mae \t%f\n' % (i, rec[i])
                          for i in range(1, len(rec)))
            pieces.append('localtime \t%s\n' % (time.asctime(),))
            pieces.append('==================================\n\n')
            report = ''.join(pieces)
            print(report)
            fwrite.fwrite_append(result, report)
예제 #3
0
def write2file(res):
    """Append each row of *res* to loc_class_out, serialized via utils.arr2str."""
    for row in res:
        fwrite.fwrite_append(loc_class_out, utils.arr2str(row))
예제 #4
0
def classf(carr, tagdir):
    """Collect the ids in *tagdir* whose second tag field appears in *carr*
    and append the serialized id list to loc_class_out.

    carr   : container of class tags (membership-tested per entry).
    tagdir : mapping id -> indexable tag record; field [1] is matched.
    """
    # Idiom: the original manual append loop is a plain filter -- a list
    # comprehension keeps the same iteration order over tagdir's keys.
    res = [idx for idx in tagdir if tagdir[idx][1] in carr]
    fwrite.fwrite_append(loc_class_out, utils.arr2str(res))
예제 #5
0
    def train(self, NcfTraParm):
        """Train one NCF sub-model per data class; report test MAE each epoch.

        Builds ``NcfTraParm.classif_size`` independent sub-graphs (dataset
        iterators, loss/metric tensors, and an Adagrad train op per class),
        then trains them all in a single session.  Optionally restores from
        / saves to ``NcfTraParm.cache_rec_path`` and appends each epoch's
        summary to ``NcfTraParm.result_file_path``.

        NOTE(review): the epoch summary uses ``vrmse`` and ``vloss`` from the
        LAST class only, while ``vmae`` is the test-size-weighted mean over
        all classes -- confirm this mix is intended.
        """
        k = NcfTraParm.classif_size
        # One dict of graph handles per class (init ops, loss, metrics, step).
        class_parm = [{} for _ in range(k)]

        for i in range(k):
            with tf.name_scope('class%d' % (i)):

                # Per-class data; reoge_data presumably drops invalid rows
                # and splits features/labels -- TODO confirm.
                train_data = reoge_data(NcfTraParm.train_data[i])
                test_data = reoge_data(NcfTraParm.test_data[i])
                testn = len(test_data[0])
                global_step = tf.Variable(0, trainable=False, name='gs')
                class_parm[i]['global_step'] = global_step
                ds = tf.data. \
                        Dataset.from_tensor_slices(train_data)
                ds = ds.shuffle(1000).batch(NcfTraParm.batch_size)

                # Whole test set in one batch so metrics are exact.
                test_ds = tf.data.Dataset.from_tensor_slices(test_data)
                test_ds = test_ds.batch(testn)
                # Reinitializable iterator shared by train and test datasets.
                it = tf.data.Iterator.from_structure(ds.output_types,
                                                     ds.output_shapes)

                feat, Y = it.get_next()
                train_init_op = it.make_initializer(ds)
                test_init_op = it.make_initializer(test_ds)

                class_parm[i]['train_init_op'] = train_init_op
                class_parm[i]['test_init_op'] = test_init_op

                _, loss, tmae, trmse = self.create_model(
                    feat, Y, self.create_param)

                class_parm[i]['loss'] = loss
                class_parm[i]['tmae'] = tmae
                class_parm[i]['trmse'] = trmse

                # loss+=tf.losses.get_regularization_loss();

                lr = tf.train.exponential_decay(NcfTraParm.learn_rate,
                                                global_step,
                                                NcfTraParm.lr_decy_step,
                                                NcfTraParm.lr_decy_rate,
                                                staircase=True)

                train_step = tf.train.AdagradOptimizer(lr). \
                            minimize(loss, global_step )

                class_parm[i]['train_step'] = train_step

        # summ_meg = tf.summary.merge_all();
        save = tf.train.Saver()
        with tf.Session() as sess:
            # train_summ = tf.summary.FileWriter(NcfTraParm.summary_path+'/train',sess.graph);
            # test_summ =tf.summary.FileWriter(NcfTraParm.summary_path+'/test');
            if NcfTraParm.load_cache_rec:
                save.restore(sess, NcfTraParm.cache_rec_path)
            else:
                sess.run(tf.global_variables_initializer())

            now = time.time()
            eptime = now
            for ep in range(NcfTraParm.epoch):
                # Weighted test-MAE accumulator across classes.
                test_tmae = 0.0
                cot = 0
                for i in range(k):
                    sess.run(class_parm[i]['train_init_op'])
                    # Drain the training dataset for this class/epoch.
                    while True:
                        try:
                            _, vloss, gs = sess.run(
                                (class_parm[i]['train_step'],
                                 class_parm[i]['loss'],
                                 class_parm[i]['global_step']))
                            if gs % (500) == 0:
                                print(
                                    'ep%d\t class%d\t loopstep:%d\t time:%.2f\t loss:%f'
                                    % (ep, i, gs, time.time() - now, vloss))
                                # summ = sess.run((summ_meg));
                                # train_summ.add_summary(summ, gs);
                                now = time.time()
                        except tf.errors.OutOfRangeError:
                            break
                    sess.run(class_parm[i]['test_init_op'])
                    # summ,vmae,vrmse,vloss=sess.run((summ_meg,tmae,trmse,loss));
                    vmae, vrmse, vloss = sess.run(
                        (class_parm[i]['tmae'], class_parm[i]['trmse'],
                         class_parm[i]['loss']))
                    # Weight each class's MAE by its test-set size.
                    test_tmae += vmae * len(NcfTraParm.test_data[i])
                    cot += len(NcfTraParm.test_data[i])
                    print(vmae)
                    # test_summ.add_summary(summ, ep);
                vmae = test_tmae / cot
                eps = '==================================================\n'
                eps += 'ep%d结束 \t eptime=%.2f\n' % (ep, time.time() - eptime)
                eps += 'test_mae=%f test_rmse=%f\n' % (vmae, vrmse)
                eps += 'acttime=%s\n' % (time.asctime())
                eps += '==================================================\n'
                eptime = time.time()
                print(eps)
                if NcfTraParm.result_file_path != '':
                    fwrite_append(NcfTraParm.result_file_path, eps)

                print('ep%d结束 \t eponloss=%f\t test_mae=%f test_rmse=%f\n' %
                      (ep, vloss, vmae, vrmse))

            if NcfTraParm.cache_rec_path != '':
                save.save(sess, NcfTraParm.cache_rec_path)
        pass
예제 #6
0
            predict[:,:,t] = pmf(train[:,:,t],32,50,0.001,0.01);
            print(mae(test[:,:,t], predict[:,:,t]));
        print('预测结束,耗时 %.2f秒\n'%((time.time() - now)))
         
        print('评测开始');
        now = time.time();
        ret = evel(test,predict);
        allret[spa] = ret;
        print('评测结束,耗时 %.2f秒\n'%((time.time() - now)))
        
        print(ret);

    for spa in allret:        
        retstr = '==================================\n';
        retstr+= 'round-%d spa-%.2f\n'%(rid,spa);
        rec = allret[spa];
        retstr+= 'all - mae \t%f\n'%(rec[0]);
        for i in range(1,len(rec)):
            retstr+= 't-%d mae \t%f\n'%(i,rec[i]);
        retstr+='localtime \t%s\n'%(time.asctime());    
        retstr+= '==================================\n\n';
        print(retstr);    
        fwrite.fwrite_append(result, retstr);
    
    pass;

if __name__ == '__main__':
    # Sweep every (round, sparsity) combination through the experiment.
    for round_id in range(eponch):
        for sparsity in spas:
            run(sparsity, case, round_id)
예제 #7
0
파일: ncf3d.py 프로젝트: ZWP-FlyZ/NewWst64
    def train(self,NcfTraParm3D):
        """Train the 3-D NCF model under a TF1 session with summaries.

        Restores from ``cache_rec_path`` when ``load_cache_rec`` is set,
        otherwise initializes all variables; after all epochs it saves the
        model (if a cache path is given) and closes both summary writers.

        NOTE(review): per-epoch test MAE is the batch-MAE sum divided by a
        hard-coded 20 (test set batched as ``testn//20``) -- a remainder
        batch makes this an approximation; confirm intended.
        """
        # Drop invalid records and split features from labels.
        train_data = reoge_data3D(NcfTraParm3D.train_data);
        test_data = reoge_data3D(NcfTraParm3D.test_data);
        testn = len(test_data[0]);
        print(testn);
        global_step = tf.Variable(0,trainable=False,name='gs');
        ds = tf.data. \
                Dataset.from_tensor_slices(train_data);
        ds = ds.shuffle(100).batch(NcfTraParm3D.batch_size);
        
        test_ds = tf.data.Dataset.from_tensor_slices(test_data);
        test_ds = test_ds.batch(testn//20);
        # Reinitializable iterator shared by the train and test datasets.
        it = tf.data.Iterator.from_structure(ds.output_types,
                                            ds.output_shapes);
        
        feat,Y = it.get_next(); 
        train_init_op = it.make_initializer(ds);
        test_init_op = it.make_initializer(test_ds);   
            
        Py,loss,tmae,trmse= self.create_model(feat, Y, self.create_param);
        tf.summary.scalar('loss',loss);
        tf.summary.scalar('mae',tmae);
        tf.summary.scalar('rmse',trmse);
        
        # loss+=tf.losses.get_regularization_loss;
        
        # Staircase-decayed learning rate driven by global_step.
        lr = tf.train.exponential_decay(NcfTraParm3D.learn_rate, global_step,
                                NcfTraParm3D.lr_decy_step,
                                NcfTraParm3D.lr_decy_rate,
                                staircase=True);
                                
        train_step = tf.train.AdagradOptimizer(lr). \
                    minimize(loss, global_step );
        summar_meg = tf.summary.merge_all();
        
        save = tf.train.Saver();
        with tf.Session() as sess:
            train_summ=tf.summary.FileWriter(NcfTraParm3D.summary_path+'/train',sess.graph);
            test_summ = tf.summary.FileWriter(NcfTraParm3D.summary_path+'/test',sess.graph);
            if NcfTraParm3D.load_cache_rec:
                save.restore(sess,NcfTraParm3D.cache_rec_path);
            else:
                sess.run(tf.global_variables_initializer()); 
            
            now = time.time();
            eptime = now;
            for ep in range(NcfTraParm3D.epoch):
                sess.run(train_init_op);
                # Drain the training dataset for this epoch.
                while True:
                    try:
                        _,vloss,gs=sess.run((train_step,loss,global_step));
                        
                        if gs%(1000) == 0:
                            print('ep%d\t loopstep:%d\t time:%.2f\t loss:%f'%(ep,gs,time.time()-now,vloss))
                            now=time.time();
                            summ = sess.run(summar_meg)
                            train_summ.add_summary(summ,gs);
                    except tf.errors.OutOfRangeError:
                        break  
                sess.run(test_init_op);
                summae=0;
                # Drain the test dataset, summing per-batch MAE.
                while True:
                    try:
                        summ,vmae,vrmse,vloss=sess.run((summar_meg,tmae,trmse,loss));
                        summae+=vmae;
                    except tf.errors.OutOfRangeError:
                        break;
                # Average over the (assumed) 20 test batches.
                summae/=20;
                eps = '==================================================\n'
                eps += 'ep%d结束 \t eptime=%.2f\n' %(ep ,time.time()-eptime);
                eps += 'test_mae=%f test_rmse=%f\n'%(summae,vrmse);
                eps += 'acttime=%s\n'%(time.asctime());
                eps += '==================================================\n'
                eptime = time.time();
                print(eps);
                if NcfTraParm3D.result_file_path != '': 
                    fwrite_append(NcfTraParm3D.result_file_path,eps);
            if NcfTraParm3D.cache_rec_path != '':
                save.save(sess,NcfTraParm3D.cache_rec_path)
            train_summ.close();
            test_summ.close();
예제 #8
0
파일: nncf.py 프로젝트: ZWP-FlyZ/NewWst64
    def conbine_train(self,NcfTraParmUST):
        """Combined training: NCF, then an RNN over the learned time kernel,
        then prediction/evaluation on the target time slice.

        Stage 1 (NCF): builds datasets, loss and an Adagrad step, and trains
        for ``epoch`` epochs.  NOTE(review): the whole NCF train/eval loop is
        nested in the ``else`` branch of ``load_cache_rec``, so it runs only
        when no cached model is restored -- confirm this is intended.

        Stage 2 (RNN): trains ``rnn_epoch`` epochs on (seq_len+1)-long
        sequences sliced from the hidden kernel ``R``.

        Stage 3 (predict): substitutes the RNN's predicted slice into ``R``
        and reports MAE over ``ts_train_data``.
        """
        
        ############################ define the NCF part ##############################
        # Drop invalid records and split features from labels.
        train_data = reoge_data3D(NcfTraParmUST.train_data);
        test_data = reoge_data3D(NcfTraParmUST.test_data);
        testn = len(test_data[0]);
        print(testn);
        global_step = tf.Variable(0,trainable=False,name='gs');
        ds = tf.data. \
                Dataset.from_tensor_slices(train_data);
        ds = ds.shuffle(1000).batch(NcfTraParmUST.batch_size);
        
        test_ds = tf.data.Dataset.from_tensor_slices(test_data);
        test_ds = test_ds.batch(testn//20);
        # Reinitializable iterator shared by the train and test datasets.
        it = tf.data.Iterator.from_structure(ds.output_types,
                                            ds.output_shapes);
        
        feat,Y = it.get_next(); 
        train_init_op = it.make_initializer(ds);
        test_init_op = it.make_initializer(test_ds);   
            
        Py,loss,tmae,trmse= self.create_ncf_model(feat, Y, self.create_param);
        tf.summary.scalar('loss',loss);
        tf.summary.scalar('mae',tmae);
        tf.summary.scalar('rmse',trmse);
        
        # loss+=tf.losses.get_regularization_loss;
        
        lr = tf.train.exponential_decay(NcfTraParmUST.learn_rate, global_step,
                                NcfTraParmUST.lr_decy_step,
                                NcfTraParmUST.lr_decy_rate,
                                staircase=True);
                                
        train_step = tf.train.AdagradOptimizer(lr). \
                    minimize(loss, global_step );
        summar_meg = tf.summary.merge_all();
        
        
        ############################### define the RNN part ###############################
        
        seq_len = NcfTraParmUST.seq_len;
        t_strat,t_end = NcfTraParmUST.time_range;
        
        rnn_lr = NcfTraParmUST.rnn_learn_rat;
        rnn_epoch = NcfTraParmUST.rnn_epoch;
        
        # Time kernel learned by the NCF stage; sliced into training
        # sequences of length seq_len+1 (input = [:-1], target = [1:]).
        R = self.get_hid_kernel('R');
        nR = R[t_strat:t_end];
        splitedR = tf.reshape(nR,[-1,seq_len+1,R.shape[1]]);
        print(splitedR);
        rnnds = tf.data.Dataset.from_tensor_slices(splitedR);
    #     ds.map(lambda i:(i[:-1],i[1:]));
        rnnit = rnnds.make_initializable_iterator();
        nextitem = rnnit.get_next();
        x,y=tf.expand_dims(nextitem[:-1],0),\
                tf.expand_dims(nextitem[1:],0);
        
        x = tf.cast(x,tf.float32);        
                
        rnnpy,rnnloss,rnnmae,rnngru=self.create_rnn_model(x,y,NcfTraParmUST);
        
        
        ######################## define RNN prediction ########################
        # Build the time features for the slice to be predicted.
        splitedR = tf.reshape(R[t_end-1],[-1,1,R.shape[1]]);
        pRet = rnngru(splitedR);
        pRet = tf.reshape(pRet,[1,R.shape[1]]);
        # Replace slice t_end of R with the RNN's prediction.
        newR = tf.concat([R[:t_end],pRet,R[t_end+1:]],axis=0);
        newP = self.get_hid_kernel('P');
        newQ = self.get_hid_kernel('Q');
        
        predict_data = reoge_data3D(NcfTraParmUST.ts_train_data);
        rnn_ds = tf.data.Dataset.from_tensor_slices(predict_data);
        rnn_ds = rnn_ds.batch(len(predict_data[0])//20);
        pre_it = rnn_ds.make_one_shot_iterator();
        perfeat,preY = pre_it.get_next();
        U,S,T = self.toUSToneHot(perfeat);
        newPu = tf.matmul(U,newP);
        newQs = tf.matmul(S,newQ);
        newRt = tf.matmul(T,newR);
        ppy = self.get_ncf_nn_front(newPu, newQs, newRt, 
                    self.create_param.hid_units,
                    (tf.nn.relu,tf.nn.relu), None);
        print(tf.trainable_variables())
        pre_mae = tf.reduce_mean(tf.abs(ppy-preY));
        Ret = R[t_end];
        
        
        
        train_op = tf.train.AdamOptimizer(rnn_lr).minimize(rnnloss);
        
        save = tf.train.Saver();
        with tf.Session() as sess:
            train_summ=tf.summary.FileWriter(NcfTraParmUST.summary_path+'/train',sess.graph);
            test_summ = tf.summary.FileWriter(NcfTraParmUST.summary_path+'/test',sess.graph);
            if NcfTraParmUST.load_cache_rec:
                save.restore(sess,NcfTraParmUST.cache_rec_path);
            else:
                sess.run(tf.global_variables_initializer()); 
                now = time.time();
                eptime = now;
                for ep in range(NcfTraParmUST.epoch):
                    sess.run(train_init_op);
                    # Training
                    while True:
                        try:
                            _,vloss,gs=sess.run((train_step,loss,global_step));
                            
                            if gs%(1000) == 0:
                                print('ep%d\t loopstep:%d\t time:%.2f\t loss:%f'%(ep,gs,time.time()-now,vloss))
                                now=time.time();
                                summ = sess.run(summar_meg)
                                train_summ.add_summary(summ,gs);
                        except tf.errors.OutOfRangeError:
                            break
                    
                    # Evaluation
                    sess.run(test_init_op);
                    summae=0;
                    while True:
                        try:
                            summ,vmae,vrmse,vloss=sess.run((summar_meg,tmae,trmse,loss));
                            summae+=vmae;
                        except tf.errors.OutOfRangeError:
                            break;
                    
                    # Average over the (assumed) 20 test batches.
                    summae/=20;
                    eps = '==================================================\n'
                    eps += 'ep%d结束 \t eptime=%.2f\n' %(ep ,time.time()-eptime);
                    eps += 'test_mae=%f test_rmse=%f\n'%(summae,vrmse);
                    eps += 'acttime=%s\n'%(time.asctime());
                    eps += '==================================================\n'
                    eptime = time.time();
                    print(eps);
                    if NcfTraParmUST.result_file_path != '': 
                        fwrite_append(NcfTraParmUST.result_file_path,eps);
                
                if NcfTraParmUST.cache_rec_path != '':
                    save.save(sess,NcfTraParmUST.cache_rec_path)
                train_summ.close();
                test_summ.close();  
            ############## end else ###################
            
        ########################## RNN ##########################
            
            for ep in range(rnn_epoch):
                sess.run(rnnit.initializer)
                maesum=0.0;
                cot=0;
                while True:
                    try:
                        _,vx,vy,vpy,vloss,vmae,vpret,urrr=sess.run([train_op,x,y,rnnpy,rnnloss,rnnmae,pRet,Ret])
                        maesum+=vmae;
                        cot+=1;
#                         print('x:\n',vx);
#                         print('y:\n',vy);
#                         print('py:\n',vpy);
#                         print('loss:\n',vloss);
#                         print('mae:\n',vmae);
#                         print('pRet:\n',vpret);
#                         print('------------------------')
                    except tf.errors.OutOfRangeError:break;
                maesum/=cot;
#                 print('pRet:\n',vpret,urrr);
                print('ep%d end mae=%f'%(ep,maesum));
            
            # Final prediction pass: pre_it is one-shot, so this loop drains
            # it exactly once and averages the per-batch MAE.
            predict_sum_mae =0.0;
            predict_cot=0;
            while True:
                    try:
                        vmae=sess.run(pre_mae)
                        predict_sum_mae+=vmae;
                        predict_cot+=1;
                    except tf.errors.OutOfRangeError:break;    
            print('all predict mae',predict_sum_mae/predict_cot);