# Beispiel (Example) #1 — score: 0
def buildtopicmodel(filename,topicnum):
   pretreatment(filename)
#build the dictionary
   dictionary = corpora.Dictionary(finitialwords)
# build the whole corpus words-frequency list
   corpus = [dictionary.doc2bow(document) for document in finitialwords]
#print corpus
# calculate the tf-idf
   tfidf = models.TfidfModel(corpus)
   corpus_tfidf = tfidf[corpus]
# build the lda model
   lda = gensim.models.ldamodel.LdaModel(corpus_tfidf,id2word=dictionary,num_topics=topicnum,update_every=0,passes=10)
   index = similarities.MatrixSimilarity(lda[corpus])
   lda.save(str(topicnum) + ".pkl")
#model = models.ldamodel.LdaModel.load('20topics.pkl')
   print lda.print_topics
# Beispiel (Example) #2 — score: 0
def buildtopicmodel(filename, topicnum):
    pretreatment(filename)
    #build the dictionary
    dictionary = corpora.Dictionary(finitialwords)
    # build the whole corpus words-frequency list
    corpus = [dictionary.doc2bow(document) for document in finitialwords]
    #print corpus
    # calculate the tf-idf
    tfidf = models.TfidfModel(corpus)
    corpus_tfidf = tfidf[corpus]
    # build the lda model
    lda = gensim.models.ldamodel.LdaModel(corpus_tfidf,
                                          id2word=dictionary,
                                          num_topics=topicnum,
                                          update_every=0,
                                          passes=10)
    index = similarities.MatrixSimilarity(lda[corpus])
    lda.save(str(topicnum) + ".pkl")
    #model = models.ldamodel.LdaModel.load('20topics.pkl')
    print lda.print_topics
# Beispiel (Example) #3 — score: 0
def test():
    """Drive scanner() over the pretreated input buffer and print every
    (code, value) token pair until the end-of-input marker '#' appears."""
    Buf = pretreatment()
    code = ''
    value = ''
    i = 0
    while(code != '#'):
        # Skip blanks between tokens.
        while(Buf[i] == ' '):
            i += 1
        # scanner() returns the token code, its value, and the next index.
        code,value,i = scanner(Buf,i)
        print '(',code,',',value,')'
def main():
    """Table-driven LR parser for a small declaration/assignment grammar.

    Tokens come from scanner() over the pretreated buffer.  The table M is
    indexed by [state][col(symbol, TNT)]: a positive entry (< len(M)) is a
    shift to that state, a negative entry is a reduction by production
    p[-entry], 99 is accept, and 0 is an error.  Parse steps are printed as
    a trace table (state stack / symbol stack / lookahead).
    """
    # All grammar symbols in table-column order: terminals first
    # ({ } ; , i a = + * ( ) x - #), then the nonterminals (P L V S X Y Z).
    TNT = "{};,ia=+*()x-#PLVSXYZ"
    # Combined LR action/goto table; rows are states, columns follow TNT.
    M = [[2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0],
       [0,0,0,0,0,0,0,0,0,0,0,0,0,99,0,0,0,0,0,0,0],
       [0,0,0,0,0,5,0,0,0,0,0,0,0,0,0,3,4,0,0,0,0],
       [0,6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
       [0,0,7,8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
       [0,0,0,0,9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
       [0,0,0,0,0,0,0,0,0,0,0,0,0,-1,0,0,0,0,0,0,0],
       [0,0,0,0,11,0,0,0,0,0,0,0,0,0,0,0,0,10,0,0,0],
       [0,0,0,0,12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
       [0,0,-4,-4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
       [0,-2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
       [0,0,0,0,0,0,13,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
       [0,0,-3,-3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
       [0,0,0,0,19,0,0,0,0,17,0,20,18,0,0,0,0,0,14,15,16],
       [0,-5,0,0,0,0,0,21,0,0,0,0,0,0,0,0,0,0,0,0,0],
       [0,-7,0,0,0,0,0,-7,22,0,-7,0,0,0,0,0,0,0,0,0,0],
       [0,-9,0,0,0,0,0,-9,-9,0,-9,0,0,0,0,0,0,0,0,0,0],
       [0,0,0,0,19,0,0,0,0,17,0,20,18,0,0,0,0,0,23,15,16],
       [0,0,0,0,19,0,0,0,0,17,0,20,18,0,0,0,0,0,0,0,24],
       [0,-12,0,0,0,0,0,-12,-12,0,-12,0,0,0,0,0,0,0,0,0,0],
       [0,-13,0,0,0,0,0,-13,-13,0,-13,0,0,0,0,0,0,0,0,0,0],
       [0,0,0,0,19,0,0,0,0,17,0,20,18,0,0,0,0,0,0,25,16],
       [0,0,0,0,19,0,0,0,0,17,0,20,18,0,0,0,0,0,0,0,26],
       [0,0,0,0,0,0,0,21,0,0,27,0,0,0,0,0,0,0,0,0,0],
       [0,-11,0,0,0,0,0,-11,-11,0,-11,0,0,0,0,0,0,0,0,0,0],
       [0,-6,0,0,0,0,0,-6,22,0,-6,0,0,0,0,0,0,0,0,0,0],
       [0,-8,0,0,0,0,0,-8,-8,0,-8,0,0,0,0,0,0,0,0,0,0],
       [0,-10,0,0,0,0,0,-10,-10,0,-10,0,0,0,0,0,0,0,0,0,0]]
    # Productions; p[k][0] is the left-hand-side nonterminal and the
    # right-hand side starts at index 3 (after the "X->" prefix).
    p = ["P'->P",
        "P->{L}",
        "L->V;S",
        "V->V,i",
        "V->ai",
        "S->i=X",
        "X->X+T",
        "X->Y",
        "Y->Y*Z",
        "Y->Z",
        "Z->(E)",
        "Z->-Z",
        "Z->i",
        "Z->x"]
    LIN = len(M)        # number of states; shift actions are < LIN
    PRO_NUM = len(p)    # number of productions; reductions are in (-PRO_NUM, -1]
    state = [0]*50      # state stack (fixed capacity of 50)
    top = 0             # index of the current stack top
    symbol = ['#']*50   # symbol stack, parallel to `state`
    # NOTE(review): assumes a module-level `i` initialized elsewhere
    # (presumably to the start offset of Buf) — confirm before reuse.
    global i
    code,value = '',''
    Buf = pretreatment()
    # Skip leading blanks, then fetch the first token.
    while(Buf[i] == ' '):
        i += 1
    
    code,value,i = scanner(Buf,i)
    
    j = 0
    print 'step','\t\t\t\t','状态栈','\t\t\t','    ','符号栈','\t\t\t','  ','输入串首字符'
    while(1):
        # Print one trace row: step number, state stack, symbol stack, lookahead.
        print j,')','\t\t\t\t',
        j += 1
        state_str = ''
        for x in range(top+1):
            state_str += str(state[x])
        print  state_str,' '*(20-len(state_str)),
        print '\t\t',
        symbol_str = ''
        for y in range(top+1):
            symbol_str += str(symbol[y])
        print symbol_str,' '*(10-len(symbol_str)),
        print '\t\t\t',code
        # Look up the action for (current state, current token).
        action = M[state[top]][col(code,TNT)]
        if(action >= 1 and action < LIN):
            # Shift: push the new state and the token, advance the input.
            top += 1
            state[top] = action
            symbol[top] = code
            while(Buf[i] == ' '):
                i += 1
            code,value,i = scanner(Buf,i)
        elif action > -PRO_NUM and action <= -1:
            # Reduce by production p[-action]: pop |RHS| = len - 3 entries
            # ("A->..." has a 3-char prefix), then push the goto state and
            # the LHS nonterminal p[-action][0].
            top -= len(p[-action]) -3
            state[top+1] = M[state[top]][col(p[-action][0],TNT)]
            top += 1
            symbol[top] = p[-action][0]
        elif action == 99:
            # Accept: input parsed successfully.
            print '\t\t\t\t','Acc'
            break
        else:
            # 0 (or out-of-range) entry: syntax error — report and stop.
            print 'Err in main->',action
            break
# Beispiel (Example) #5 — score: 0
#         plt.xlabel(nameCols[0])
#         plt.ylabel(nameCols[1])
#     plt.show()

    def neuronalNetwork(self):
        """Train a small dense Keras network on self.X / self.Y, then
        print the training history and the predictions for pre.dataXVector.
        """
        net = tf.keras.models.Sequential()
        # One 16-unit layer feeding a 5-unit output layer.
        net.add(tf.keras.layers.Dense(units=16, input_shape=[16]))
        net.add(tf.keras.layers.Dense(units=5, input_shape=[16]))
        # Plain SGD with a mean-squared-error loss.
        net.compile(optimizer='sgd', loss='mean_squared_error')
        fit_history = net.fit(self.X, self.Y, epochs=10)
        print(fit_history)
        predictions = net.predict(pre.dataXVector)
        net.summary()
        print(predictions, pre.dataYVector)

if __name__ == "__main__":
    # Load the simulated data, shape and normalise it, then train the network.
    prepared = pretreatment(importData('simulateGenData.csv'))
    prepared.shape()
    prepared.normalize()
    analysis = treatment([prepared.dataXVector, prepared.dataYVector])
    analysis.neuronalNetwork()

    # Alternative analyses, currently disabled:
    # analysis.linearRegression()
    # analysis.plot(['windSpd', 'spd'])
    # analysis.plot(['windSpd'])
    # analysis.polynomialRegression()