def generate():
    """Run one generation cycle against the configured save path."""
    # Imported lazily so modes that never generate don't pay the import cost.
    from generate import Generator

    runner = Generator()
    runner.run(args.save_path[0], args.delete)
def main():
    """Build the shared generator, start the posting runner, and queue posts."""
    global generator
    generator = Generator(OPTS.weights_file, OPTS.id2token_file,
                          OPTS.embedding_size, OPTS.hidden_size)
    PostingRunner().start()

    if OPTS.thread_id:
        # Single-thread mode: reply to selected posts within one thread.
        posts = select_thread_posts(OPTS.board,
                                    thread_id=OPTS.thread_id,
                                    max_posts=OPTS.max_posts,
                                    min_post_len=OPTS.min_post_len,
                                    max_post_len=OPTS.max_post_len)
        for post in posts:
            produce_post(OPTS.thread_id, post)
    else:
        # Board mode: reply to the first selected post of each thread.
        threads = select_threads(OPTS.board,
                                 max_threads=OPTS.max_threads,
                                 min_post_len=OPTS.min_post_len,
                                 max_post_len=OPTS.max_post_len)
        for thread_id, thread_posts in threads:
            produce_post(thread_id, thread_posts[0])

    # Block forever; the background runner does the actual posting.
    while True:
        time.sleep(1)
def load_gpt2(self, model_name):
    """Swap in a GPT-2 generator for *model_name* and reset generated state."""
    if self.gpt2:
        # Drop the previous model before allocating the new one so its
        # GPU memory can actually be reclaimed.
        del self.gpt2
        torch.cuda.empty_cache()
    self.gpt2 = Generator(model_name)
    # Start from the new model's default text with no sentences yet.
    self.generated_text = self.gpt2.default
    self.generated_sentences = []
def test_generate_property_site_map_passes_each_es_data_page_to_site_map_creator(
        self, mock_create_site_map_index_file, mock_flush_site_map,
        mock_append_urls_to_site_map, mock_clear_site_map_directory,
        mock_client_exit, mock_next_page_of_records):
    """Each non-empty ES page is appended to the map, then it is flushed."""
    page_one = [
        SiteMapUrl(location='loc1', last_modified='2015-03-01',
                   change_frequency='weekly')
    ]
    page_two = [
        SiteMapUrl(location='loc2', last_modified='2015-03-02',
                   change_frequency='daily')
    ]
    # Two pages of records followed by an empty page, which ends paging.
    mock_next_page_of_records.side_effect = [page_one, page_two, []]

    Generator(CONFIG).generate_property_site_map()

    mock_clear_site_map_directory.assert_called_once()
    # Three fetches: two data pages plus the terminating empty page.
    self.assertEqual(len(mock_next_page_of_records.mock_calls), 3)
    self.assertListEqual(mock_append_urls_to_site_map.mock_calls,
                         [call(page_one), call(page_two)])
    mock_flush_site_map.assert_called_once_with()
    mock_create_site_map_index_file.assert_called_once_with()
    self.assertEqual(len(mock_client_exit.mock_calls), 1)
def test_get_permutations(self):
    """A single chunk is placed on each side of the separator."""
    gen = Generator("dummy_file", "output.txt")
    self.assertEqual(['a <::::>', '<::::> a'], gen.get_permutations(['a']))
def test_get_pairs(self):
    """Two chunks produce one separator-joined pair."""
    gen = Generator("dummy_file", "output.txt")
    self.assertEqual([' a <::::> b '], gen.get_pairs(['a', 'b']))
def run(self, args):
    """Configure a Generator from CLI args, generate, and print the result."""
    gen = Generator()
    # Copy the relevant CLI options onto the generator.
    for attr in ('length', 'seed', 'model'):
        setattr(gen, attr, getattr(args, attr))
    # NOTE(review): reads the private _model right after assigning .model —
    # presumably a property alias on Generator; confirm.
    gen.load_model_from_file(gen._model)
    self._print(gen.generate(), args.output_path)
def test_drop_words_permutations_all(self):
    """With a 100% drop rate every word is removed from both permutations."""
    gen = Generator("dummy_file", "output.txt", drop_words_percentage=1)
    self.assertEqual([' <::::> ', ' <::::> '], gen.get_permutations(['a']))
def generate_pass():
    """Tk callback: generate a password from the UI controls and show it.

    Reads the security level, required characters, and min/max length
    spinners, clamps them to their limits, fills the output entry, and
    updates the note label. Recurses once to regenerate if min > max.
    """
    generate_button.configure(bg="black", fg="white")
    # Security level index selected in the combo box.
    s = security_levels.index(comboBox.get())
    # Required characters, deduplicated; the placeholder text counts as none.
    r = "".join(
        set("" if other_entry.get() == "Other required characters" else
            other_entry.get()))
    # Clamp the length spinners to their allowed maximums.
    if int(min_spin.get()) > 50:
        min_spin.delete(0, END)
        min_spin.insert(0, 50)
    if int(max_spin.get()) > 100:
        max_spin.delete(0, END)
        max_spin.insert(0, 100)
    a = Generator.generate_password(s, r, int(min_spin.get()),
                                    int(max_spin.get()), c_check.get(),
                                    s_check.get())
    # Replace the previous output with the new password.
    output.delete(0, END)
    output.insert(0, str(a))
    b = "\n Copy pasted passwords do not save when window is closed"
    if (max_spin.get() != "0") and (int(min_spin.get()) > int(max_spin.get())):
        # Invalid range: raise max to min and regenerate recursively.
        max_spin.delete(0, END)
        max_spin.insert(0, min_spin.get())
        generate_pass()
    else:
        # Pick the explanatory note matching the chosen security level.
        if s == 0:
            note.configure(
                text=
                "Note: Password made up of adjective, nouns, repeating numbers and required characters"
                + b)
        elif s == 1:
            if output.get(
            ) != "I was too lazy to debug this, just increase the maximum please":
                note.configure(
                    text=
                    "Note: First part of password is a slice of the alphabet" +
                    b)
            else:
                note.configure(text=":(")
        elif s == 2:
            if len(output.get()) > 10:
                note.configure(
                    text=
                    "Note: This password is hard to remember, best if written down somewhere"
                    + b)
            else:
                note.configure(
                    text=
                    "Note: Copy pasted passwords do not save when window is closed"
                )
        else:
            note.configure(
                text=
                "Note: This password is really hard to remember, best if written down somewhere"
                + b)
def test_drop_words_pairs_half(self):
    """A 50% drop rate leaves one word on the left side of the pair."""
    gen = Generator("dummy_file", "output.txt", drop_words_percentage=0.5)
    pairs = gen.get_pairs(['a', 'b'])
    left_side = pairs[0].split('<::::>')[0]
    self.assertEqual(len(left_side.split()), 1)
def generate():
    """Plan keywords from the request text and return the generated poem."""
    text = request.args['keywords']
    keywords = Planner().plan(text)
    poem = Generator().generate(keywords)
    return '\n'.join(poem)
def __init__(self, device, sample_size=1000, z_dim=62, active_learning=False):
    """Wrap a pretrained DCGAN generator restored from its checkpoint file."""
    self.device = device
    self.sample_size = sample_size
    self.z_dim = z_dim
    self.active_learning = active_learning
    # Single-channel, 32x32 generator loaded from the epoch-99 weights.
    checkpoint = 'utils/cifar10_2class/trained_models/dcgan/netG_epoch_99.pth'
    self.G = Generator(1, input_size=32).to(device)
    state_dict = torch.load(checkpoint, map_location=self.device)
    self.G.load_state_dict(state_dict)
def __init__(self):
    """Create the dataset, trainer, generator, and the `generate` CLI parser."""
    super().__init__()
    self.dataset = Dataset()
    self.runner = TrainRunner()
    self.generator = Generator()
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--count", type=int,
        help="set count of HR imgs to be used, default to the total")
    parser.add_argument(
        "--size", type=int,
        help="set size of a sub-crop img, default to 480")
    self.generate_parser = parser
def test_generate(self):
    """End-to-end check: pairs-mode generate() writes the expected lines.

    Runs the generator over the sample CoNLL-U file and compares the
    output file's contents against the expected tab-separated
    "sentence<TAB>chunk-pair" lines (chunks joined by the <::::> marker).
    """
    Generator("test_file.conllu", "test_output.txt", split="pairs").generate()
    file = open("test_output.txt", 'r')
    contents = file.readlines()
    file.close()
    print("contents: ", contents)
    expected = [
        ' इसके अतिरिक्त गुग्गुल कुंड , भीम गुफा तथा भीमशिला भी दर्शनीय स्थल हैं । \t इसके अतिरिक्त स्थल <::::> गुग्गुल कुंड , भीम गुफा तथा भीमशिला भी स्थल \n',
        ' इसके अतिरिक्त गुग्गुल कुंड , भीम गुफा तथा भीमशिला भी दर्शनीय स्थल हैं । \t इसके अतिरिक्त स्थल <::::> दर्शनीय स्थल \n',
        ' इसके अतिरिक्त गुग्गुल कुंड , भीम गुफा तथा भीमशिला भी दर्शनीय स्थल हैं । \t गुग्गुल कुंड , भीम गुफा तथा भीमशिला भी स्थल <::::> दर्शनीय स्थल \n'
    ]
    # Clean up the generated file before asserting so it is removed even
    # when the comparison fails only on re-runs of the suite.
    os.remove("test_output.txt")
    self.assertEqual(expected, contents)
def main(inputdirs, opts=None, **extraOpts):
    """Extract documentation from *inputdirs*, render it, return the info."""
    # Convenience: keyword arguments act as the options mapping when no
    # explicit opts dict was supplied.
    if not opts and extraOpts:
        opts = extraOpts

    # Guard clause: both options and at least one input directory required.
    if not opts or len(inputdirs) == 0:
        optparser.error("Incorrect number of arguments")
        return None

    extractor = Extractor(inputdirs,
                          infoKls=Everything,
                          tempDir=opts.get("tempDir"),
                          stateKls=State,
                          parserKls=Parser,
                          extension=opts.get("extension", "js"))
    information = extractor.generate()

    # Stamp project metadata onto the extracted information.
    information[PROJECT] = opts.get("project", None)
    information[VERSION] = opts.get("version", None)
    information[PROJECT_URL] = opts.get("projectUrl", None)
    information[COPYRIGHT_TAG] = opts.get("copyrightTag", None)

    # The bundled default templates are always searched first.
    templatedirs = opts.get("templateDirs", [])
    defaultTemplates = os.path.join(here, "templates")
    if defaultTemplates not in templatedirs:
        templatedirs.insert(0, defaultTemplates)

    gen = Generator(tempdir=opts.get("tempDir"),
                    outDir=opts.get("outDir"),
                    assetDirs=opts.get("assetDirs", None),
                    showPrivate=opts.get("showPrivate", True),
                    templateDirs=templatedirs)
    gen.process(information)
    return information
def generate_control():
    """Interactive driver: run the poem generator once or in a loop."""
    funcutils = FuncUtils()
    generator = Generator()
    control = input('请输入【once,while】 once:只执行一次测试, while:循环执行测试')
    if control == 'once':
        keywords = input('输入keywords,4个,通过空格区分').split(' ')
        generate(funcutils, generator, keywords)
    elif control == 'while':
        # Keep prompting for keyword sets and printing the resulting poems.
        while True:
            keywords = input('输入keywords,4个,通过空格区分').split(' ')
            poems = generate(funcutils, generator, keywords)
            print('老夫为你作诗一首,听好了\n')
            for poem in poems:
                print(poem)
    else:
        print('输入不对,【once,while】 只允许这几种')
        sys.exit('退出')
def initialize(C, logger, relations, rel_weights):
    """Resolve the no-relation class, build the loss function and generator.

    Returns (number of relation types, loss function, Generator).
    """
    n_rel_typs = len(relations)
    # In pos-only mode there is no "no relation" class (index -1); otherwise
    # locate it and give it its configured weight.
    no_rel = -1 if C.pos_only else relations.index(C.no_rel_name)
    if not C.pos_only:
        rel_weights[no_rel] = C.no_rel_weight

    logger.log("relations : {0}".format(relations))
    logger.log("rel_weights : {0}".format(rel_weights))

    loss_func = get_loss_func(C.loss, no_rel=no_rel, class_weight=rel_weights)
    generator = Generator(C, relations=relations, no_rel=no_rel)
    return n_rel_typs, loss_func, generator
def train(n_epochs=6):
    """Train the poem generator for *n_epochs*, checkpointing on best loss.

    Logs loss/lr summaries every 32 batches and saves the model whenever
    the observed loss improves.
    """
    funcutils = FuncUtils()
    generator = Generator()
    tf.summary.scalar('accuracy', generator.loss)  # scalar plot of the loss
    tf.summary.scalar('lr', generator.lr)  # scalar plot of the learning rate
    merge_summary = tf.summary.merge_all()
    batch_no = 0
    saver = tf.train.Saver()
    tf_config = tf.ConfigProto()
    # Grow GPU memory on demand rather than grabbing it all upfront.
    tf_config.gpu_options.allow_growth = True
    loss_best = 1000
    with tf.Session(config=tf_config) as sess:
        # Summary writer target; _logwriter_dir is the output directory.
        train_writer = tf.summary.FileWriter(
            _logwriter_dir, sess.graph)
        generator.initialize_session(sess, saver)
        for epoch_iter in range(n_epochs):
            for keywords, contexts, sentences in batch_train_data(_BATCH_SIZE):
                # Log and checkpoint only every 32nd batch.
                if batch_no % 32 == 0:
                    print_onehot, logits, lr, loss, summary_value = _train_a_batch(
                        funcutils, generator, sess, epoch_iter, keywords,
                        contexts, sentences, merge_summary)
                    train_writer.add_summary(summary_value, batch_no)
                    print('epoch\t{}\tloss\t{}lr\t{}'.format(
                        epoch_iter, loss, lr))
                    # Checkpoint whenever the sampled loss improves.
                    if loss < loss_best:
                        saver.save(sess, _model_path)
                        loss_best = loss
                batch_no += 1
def init():
    """Load the pre-trained planner and generator into module globals."""
    global planner, generator2
    planner = Planner()
    generator2 = Generator()
from flask import Flask, jsonify, request
from generate import Generator
import json

app = Flask(__name__)

# One shared generator built from the trained Zelda GAN checkpoint.
gen = Generator("ZeldaGAN/ZeldaFixedDungeonsAlNoDoors_10000_10.pth", 6, 10)


@app.route("/api/get-level", methods=['GET'])
def get_level():
    """Return one randomly generated level."""
    return jsonify(gen.generate())


@app.route("/api/get-level", methods=['POST'])
def get_level_post():
    """Generate one level per latent vector in the posted JSON body."""
    payload = request.get_json()
    levels = [gen.generate(vector=v) for v in payload['vectors']]
    return jsonify(levels)


if __name__ == "__main__":
    app.run()
                    dest='planner',
                    default=False,
                    action='store_true',
                    help='train planning model')
# -g: train only the generation model.
parser.add_argument('-g',
                    dest='generator',
                    default=False,
                    action='store_true',
                    help='train generation model')
# -a: train both the planner and the generator.
parser.add_argument('-a',
                    dest='all',
                    default=False,
                    action='store_true',
                    help='train both models')
# --clean: delete every saved model instead of training.
parser.add_argument('--clean',
                    dest='clean',
                    default=False,
                    action='store_true',
                    help='delete all models')
args = parser.parse_args()
if args.clean:
    # Wipe all saved model files from the save directory.
    for f in os.listdir(save_dir):
        os.remove(os.path.join(save_dir, f))
else:
    if args.all or args.planner:
        train_planner()
    if args.all or args.generator:
        generator = Generator()
        generator.train(n_epochs=10)
    print("All training is done!")
def setUp(self):
    """Create a fresh credential record before each test."""
    self.new_user = Generator("instagram", "codeprospect", "4568")
def create_social(g_media, g_account, g_password):
    """Create and return a new social-media credential record."""
    return Generator(g_media, g_account, g_password)
def generate(number):
    """Delegate to Generator.password with the supplied *number* argument."""
    return Generator.password(number)
loss = total_eucl_loss(model.output, labels, len(labels), batch_size=batch_size) # opt = keras.optimizers.SGD(lr=1e-5, momentum=0.9, decay=1e-6, nesterov=True).get_updates(params, [], loss) opt = MultiSGD(lr=1e-5, momentum=0.9, decay=1e-6, nesterov=True, lr_mult=optimizer_lr_mult(model)).get_updates( params, [], loss) train_model = K.function(model.input + labels + [K.learning_phase()], [loss], opt) # define generaters t_generator = Generator("images/train_dataset_2014.npy") v_generator = Generator("images/valid_dataset_2014.npy") # train for model for epoch in range(epochs): t_total_coss = 0 t_steps = t_generator.samples_length // batch_size for step in range(t_steps): # train X, Y = t_generator.generate(batch_size=batch_size, is_random=True).__next__() t_coss = train_model(X + Y)[0] t_total_coss += t_coss # watch
# define predictor v_predictor = Predictor() # define generator train_paths = [ 'images/300W_LP/AFW_GEN', 'images/300W_LP/HELEN_GEN', 'images/300W_LP/IBUG_GEN', 'images/300W_LP/LFPW_GEN', 'images/300W_LP/AFW_Flip_GEN', 'images/300W_LP/HELEN_Flip_GEN', 'images/300W_LP/IBUG_Flip_GEN', 'images/300W_LP/LFPW_Flip_GEN' ] valid_path = 'images/AFLW2000_GEN' mask_path = 'images/uv_weight_mask2.png' t_generator = Generator(train_paths=train_paths, valid_path=valid_path, mask_path=mask_path, image_shape=image_shape, batch_size=batch_size).generate(is_training=True) v_generator = Generator(train_paths=train_paths, valid_path=valid_path, mask_path=mask_path, image_shape=image_shape, batch_size=batch_size).generate(is_training=False) saver = tf.train.Saver() with tf.Session() as sess: # initial variables sess.run(tf.local_variables_initializer()) sess.run(tf.global_variables_initializer())
def display_existing_accounts():
    """Return every saved password record via Generator.display_passwords."""
    return Generator.display_passwords()
def testpasswordGenerator(self):
    """password('8') should store the new password in Generator.passwords."""
    generated = Generator.password('8')
    self.assertEqual(Generator.passwords[1], generated)
def load_gpt2(self, model_name):
    """Load a fresh GPT-2 generator, discarding any previous one first."""
    if self.gpt2:
        # Free the old model before allocating the replacement so its
        # GPU memory is actually released by empty_cache().
        del self.gpt2
        torch.cuda.empty_cache()
    self.gpt2 = Generator(model_name)
def testDisplayPasswords(self):
    """display_passwords() should expose the class-level passwords list."""
    self.assertEqual(Generator.display_passwords(), Generator.passwords)