def GET(self, arg=None):
    """Handle a GET request and emit JSON.

    Query parameters (with defaults):
        bits  -- log2 of the wordlist size to draw candidate words from (default 6).
        words -- number of words to join into the generated password (default 5).

    When no trailing URL pattern is supplied, draws 2**bits words from the
    model and generates a random password from them.
    """
    # Get query params
    query = web.input(bits=6, words=5)
    # Change output to JSON
    web.header('Content-type', 'application/json')
    # If no pattern at the end of the url,
    # we will generate a random password
    if not arg:
        try:
            words = model.get_words(results=2 ** int(query.bits))
            # Convert iterator into a plain list of word strings
            wordlist = [word.word for word in words]
        # BUG FIX: original read `except (), e:` -- an empty exception
        # tuple, which catches NOTHING, making this handler dead code.
        except Exception as e:
            web.internalerror(str(e))
            raise
        try:
            generatedPass = generate_password(int(query.words), wordlist)
        # BUG FIX: same empty-tuple `except (), e:` defect as above.
        except Exception as e:
            web.internalerror(str(e))
            raise
        # NOTE(review): generatedPass is computed but never returned --
        # presumably a JSON response should be emitted here; confirm
        # against the original file (this chunk may be truncated).
def generate():
    """Flask view: predict words following the submitted text.

    Reads 'text' (seed string) and 'nextstep' (word count) from the posted
    form, falling back to defaults when they are empty, and renders the
    prediction into index.html.
    """
    string = request.form.get('text')
    number = request.form['nextstep']
    # BUG FIX: original used `number is ''` / `string is ''` -- identity
    # comparison against a string literal is implementation-dependent and
    # unreliable. Truthiness also covers the None that .get('text')
    # returns when the field is absent.
    if not number:
        number = 35
    if not string:
        string = 'Oh Romeo, Oh Romeo,'
    prediction = get_words(model, string, int(number))
    result = "Result"
    return render_template('index.html', prediction=prediction, result=result)
def corpus_words_list():
    """Render the corpus word list, adding login details when a session exists."""
    # NOTE(review): an identical function is defined again later in this
    # file; at import time the later definition shadows this one.
    context = {'corpus_list': model.get_words()}
    if session:
        user = session['user']
        context['logged_in'] = 'Logged in as: %s.' % user
        context['not_you'] = 'Not %s?' % user
    return render_template('browse_list_words.html', **context)
def corpus_words_list():
    """Render the browsable corpus word list; show the current user if logged in."""
    words = model.get_words()
    # Guard clause: anonymous visitors get the plain list.
    if not session:
        return render_template('browse_list_words.html', corpus_list=words)
    current_user = session['user']
    return render_template(
        'browse_list_words.html',
        logged_in='Logged in as: %s.' % current_user,
        corpus_list=words,
        not_you='Not %s?' % current_user,
    )
def main(data_dir, out_dir, n_iter=10, vector_len=300, vocab_size=20000,
         hidden_len=300, depth=3, drop_rate=0.3, rho=1e-4, batch_size=24):
    """Train a network on data_dir/'train', then evaluate it on data_dir/'test'.

    Checkpoints are written to out_dir as model_{epoch}.pickle; the final
    evaluation score is printed.
    """
    print("Loading")
    nlp = spacy.en.English(parser=False)
    train_set = Dataset(nlp, data_dir / 'train', batch_size)

    print("Training")
    checkpoint_writer = model_writer(out_dir, 'model_{epoch}.pickle')
    # 2 is the number of output classes -- TODO confirm against model.train.
    network = model.train(train_set, vector_len, hidden_len, 2, vocab_size,
                          depth, drop_rate, rho, n_iter, checkpoint_writer)

    score = model.Scorer()
    print("Evaluating")
    for doc, label in read_data(nlp, data_dir / 'test'):
        word_ids, embeddings = model.get_words(doc, 0.0, vocab_size)
        prediction = network.forward(word_ids, embeddings)
        # Accumulate correctness (guess == label) into the scorer.
        score += prediction == label
    print(score)