def generate_reference_frame(timeslots, subcarriers, active_subcarriers,
                             cp_len, cs_len, alpha=.2):
    p_seed = utils.generate_seed('awesome preamble')
    f_seed = utils.generate_seed('awesome frame')
    subcarrier_map = mapping.get_subcarrier_map(subcarriers,
                                                active_subcarriers,
                                                dc_free=True)
    overlap = 2
    frame_preamble, x_preamble = preamble.mapped_preamble(
        p_seed, 'rrc', alpha, active_subcarriers, subcarriers,
        subcarrier_map, overlap, cp_len, cs_len)
    d = utils.get_random_qpsk(timeslots * active_subcarriers, f_seed)
    d_frame = mod_frame = gfdm_modulation.modulate_mapped_gfdm_block(
        d, timeslots, subcarriers, active_subcarriers, overlap, alpha,
        dc_free=True)
    symbol = cyclic_prefix.add_cyclic_starfix(d_frame, cp_len, cs_len)
    window_ramp = cyclic_prefix.get_raised_cosine_ramp(
        cs_len,
        cyclic_prefix.get_window_len(cp_len, timeslots, subcarriers, cs_len))
    d_frame = cyclic_prefix.pinch_block(symbol, window_ramp)
    H = filters.get_frequency_domain_filter('rrc', alpha, timeslots,
                                            subcarriers, overlap)
    return (np.concatenate((frame_preamble, d_frame)),
            mod_frame, x_preamble, d, H)
def generate_integrated_frame(timeslots, subcarriers, active_subcarriers,
                              cp_len, cs_len, alpha=.2):
    p_seed = utils.generate_seed('awesome preamble')
    f_seed = utils.generate_seed('awesome frame')
    subcarrier_map = mapping.get_subcarrier_map(subcarriers,
                                                active_subcarriers,
                                                dc_free=True)
    overlap = 2
    p, p_vals = preamble.symmetric_mapped_preamble(
        p_seed, 'rrc', alpha, active_subcarriers, subcarriers,
        subcarrier_map, overlap, cp_len, cs_len)
    frame_preamble, x_preamble = p
    p = gfdm_modulation.modulate_mapped_gfdm_block(
        np.concatenate((p_vals, p_vals,
                        np.zeros((timeslots - 2) * active_subcarriers))),
        timeslots, subcarriers, active_subcarriers, overlap, alpha,
        dc_free=True)
    x_preamble = p[0:len(x_preamble)]
    d = utils.get_random_qpsk((timeslots - 4) * active_subcarriers, f_seed)
    d = np.tile(p_vals, timeslots)  # overrides the random payload above
    # d = np.concatenate((p_vals, p_vals, d, p_vals, p_vals))
    # d = utils.get_random_qpsk((timeslots - 2) * active_subcarriers, f_seed)
    # d = np.concatenate((p_vals, p_vals, d))
    d_frame = mod_frame = gfdm_modulation.modulate_mapped_gfdm_block(
        d, timeslots, subcarriers, active_subcarriers, overlap, alpha,
        dc_free=True)
    symbol = cyclic_prefix.add_cyclic_starfix(d_frame, cp_len, cs_len)
    window_ramp = cyclic_prefix.get_raised_cosine_ramp(
        cs_len,
        cyclic_prefix.get_window_len(cp_len, timeslots, subcarriers, cs_len))
    # d_frame = cyclic_prefix.pinch_block(symbol, window_ramp)
    H = filters.get_frequency_domain_filter('rrc', alpha, timeslots,
                                            subcarriers, overlap)
    return p, mod_frame, x_preamble, d, H
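# Hypothetical usage sketch for the two frame generators above (not from the
# original module): the parameter values are illustrative assumptions, and the
# GFDM helper modules (utils, mapping, preamble, gfdm_modulation,
# cyclic_prefix, filters) plus numpy as np are assumed importable as above.
def _demo_reference_frame():
    frame, mod_frame, x_preamble, d, H = generate_reference_frame(
        timeslots=9, subcarriers=64, active_subcarriers=52,
        cp_len=16, cs_len=8)
    # the first return value is the windowed CP/CS payload block with the
    # preamble prepended; d holds the raw QPSK payload for reference checks
    print('frame:', len(frame), 'preamble:', len(x_preamble),
          'payload symbols:', len(d))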
def generate_test_sync_samples(M, K, L, alpha, cp_len, ramp_len, snr_dB,
                               test_cfo, init_phase=0.0, ref_data=False):
    block_len = M * K
    data = get_random_qpsk(block_len,
                           seed=generate_seed('awesomepayloadblabla'))
    print('QPSK source energy:', calculate_average_signal_energy(data))
    x = get_gfdm_frame(data, alpha, M, K, L, cp_len, ramp_len)
    pn_symbols = get_random_qpsk(K, seed=generate_seed('awesome'))
    preamble, x_preamble = generate_sync_symbol(pn_symbols, 'rrc', alpha,
                                                K, L, cp_len, ramp_len)
    print('frame energy:', calculate_average_signal_energy(x),
          'preamble energy:', calculate_average_signal_energy(preamble))
    frame = np.concatenate((preamble, x))
    print('tx frame len', len(frame), 'len(preamble)', len(preamble))
    # simulate noise and frequency offset!
    phase_inc = cfo_to_phase_increment(test_cfo, K)
    print('phase_increment:', phase_inc)
    wave = complex_sine(phase_inc, len(frame), init_phase)
    # phase_shift = np.repeat(np.exp(1j * init_phase), len(frame))
    # wave *= phase_shift
    frame *= wave
    noise_variance = calculate_awgn_noise_variance(frame, snr_dB)
    s = get_complex_noise_vector(2 * block_len + len(frame), noise_variance)
    s[block_len:block_len + len(frame)] += frame
    if ref_data:
        return s, x_preamble, pn_symbols, data
    return s, x_preamble, pn_symbols
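# Hedged usage sketch (the values are assumptions): build a received vector at
# 5 dB SNR with a small test CFO, keeping the reference payload via
# ref_data=True so a synchronizer under test can be checked against it.
def _demo_sync_samples():
    s, x_preamble, pn_symbols, data = generate_test_sync_samples(
        M=8, K=64, L=2, alpha=.2, cp_len=32, ramp_len=16,
        snr_dB=5., test_cfo=.1, ref_data=True)
    print('rx vector:', len(s), 'known preamble:', len(x_preamble))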
def show_registration():
    user = utils.get_user_from_cookie(request)
    page_name = 'register'
    if request.method.lower() == 'get':
        page_content = render_template("register.html")
        return render_page(page_content, "register", user=user)
    if request.method.lower() == 'post':
        username = request.form.get("username") or ""
        password = request.form.get("password") or ""
        if not username or not password:
            page_content = render_template("register.html",
                                           message='Missing field')
            return render_page(page_content, page_name)
        if utils.check_username(username):
            page_content = render_template("register.html",
                                           message='That username is taken!')
            return render_page(page_content, page_name)
        seed = utils.generate_seed(username, request.remote_addr)
        totp_key = utils.get_totp_key(seed)
        utils.register_user(username, password, request.remote_addr)
        qr_url = ('http://api.qrserver.com/v1/create-qr-code/'
                  '?data=otpauth://totp/%s?secret=%s&size=220x220&margin=0'
                  % (username, totp_key))
        page_content = render_template(
            "register.html",
            message="Success! <a href='/login'>login here</a><br />"
                    "TOTP Key: %s<br /><img src='%s' />" % (totp_key, qr_url))
        return render_page(page_content, page_name)
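# show_registration derives the TOTP secret deterministically from the
# username and client IP via utils.generate_seed / utils.get_totp_key, which
# is exactly what the offline attack script further below exploits. A sketch
# of what such helpers could look like (an assumption for illustration, not
# the app's actual implementation), using only the stdlib:
import base64
import hashlib

def generate_seed_sketch(username, ip_addr):
    # deterministic: anyone who knows username + IP can recompute the seed
    return hashlib.sha256((username + ip_addr).encode()).digest()

def get_totp_key_sketch(seed):
    # base32-encode a 10-byte slice to obtain a standard TOTP secret
    return base64.b32encode(seed[:10]).decode()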
def on_epoch_end(self, epoch, logs=None):
    duration_epoch = time.time() - self.time_epoch
    logger.info("epoch: %s, duration: %ds, loss: %.6g.",
                epoch, duration_epoch, logs["loss"])
    # transfer weights from learning model
    self.inference_model.set_weights(self.model.get_weights())
    # generate text
    seed = generate_seed(self.text)
    generate_text(self.inference_model, seed, top_n=10)
    # do validation
    test_time = time.time()
    self.test_model.set_weights(self.model.get_weights())
    if self.test_text and epoch % 10 == 0:
        bpc = calculate_bpc(self.test_model, self.test_text)
        logger.info("bpc = %f", bpc)
        logger.info("best bpc = %f", LoggerCallback.best_val)
        if bpc < LoggerCallback.best_val:
            LoggerCallback.best_val = bpc
            self.model.save(
                self.checkpoint_path.replace('.ckpt', '_bestval.ckpt'))
        test_duration = time.time() - test_time
        logger.info("test duration: %ds", test_duration)
def on_train_end(self, logs=None):
    duration_train = time.time() - self.time_train
    logger.info("end of training, duration: %ds.", duration_train)
    # transfer weights from learning model
    self.inference_model.set_weights(self.model.get_weights())
    # generate text
    seed = generate_seed(self.text)
    generate_text(self.inference_model, seed, 1024, 3)
def main():
    np.set_printoptions(precision=4, suppress=True)
    seed = generate_seed('awesome')
    fft_len = 32
    active_subcarriers = 24
    subcarrier_map = np.arange(fft_len)
    subcarrier_map = np.concatenate(
        (subcarrier_map[0:active_subcarriers // 2],
         subcarrier_map[-active_subcarriers // 2:]))
    preamble, x_preamble = mapped_preamble(seed, 'rrc', .1,
                                           active_subcarriers, fft_len,
                                           subcarrier_map, 2,
                                           fft_len // 2, fft_len // 8)
    check_preamble_properties(preamble, x_preamble)
def on_epoch_end(self, epoch, logs=None):
    duration_epoch = time.time() - self.time_epoch
    logger.info("epoch: %s, duration: %ds, loss: %.6g.",
                epoch, duration_epoch, logs["loss"])
    # transfer weights from learning model
    self.inference_model.set_weights(self.model.get_weights())
    # generate text
    seed = generate_seed(self.text)
    generate_text(self.inference_model, seed)
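# Sketch of how a callback like the ones above is typically attached in
# Keras; the LoggerCallback constructor arguments here are assumptions for
# illustration, not the project's actual signature:
# model.fit(x, y, epochs=num_epochs,
#           callbacks=[LoggerCallback(text, model, inference_model)])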
def main():
    import matplotlib.pyplot as plt
    np.set_printoptions(precision=4, suppress=True)
    # preamble_auto_corr_test()
    sync_test()
    return
    # everything below is kept as a manual CFO debugging path
    # (unreachable after the return above)
    # cfo = 1.024e-5
    samp_rate = 12.5e6
    freq = 20.
    fft_len = 256
    sc_bw = samp_rate / fft_len
    cfo = freq_to_cfo(freq, fft_len, samp_rate)
    ph_i = cfo_to_phase_increment(cfo, fft_len)
    phase_inc = phase_increment(freq, samp_rate)
    print('samp_rate: {}, frequency: {}, fft_len: {}'.format(
        samp_rate, freq, fft_len))
    print('subcarrier bandwidth: {}, cfo: {}, phase increment: {}/{}'.format(
        sc_bw, cfo, ph_i, phase_inc))
    # wave = get_complex_sine(freq, samp_rate, 129)
    # s = np.ones(129)
    # s = correct_frequency_offset(s, cfo, fft_len)
    # print wave
    # print np.abs(wave - s)
    preamble, x_preamble = generate_sync_symbol(
        get_random_qpsk(fft_len, seed=generate_seed('awesome')),
        'rrc', .5, fft_len, 2, fft_len // 2, fft_len // 8)
    init_phase = 0.8
    wave = complex_sine(cfo_to_phase_increment(cfo, fft_len), len(preamble))
    # phase_shift = np.repeat(, len(preamble))
    wave *= np.exp(1j * init_phase)
    preamble *= wave
    print(phase_inc)
    ac = auto_correlate_halfs(preamble[64:64 + 2 * fft_len])
    fp = np.angle(ac)
    ac_phase_inc = fp / fft_len
    print('auto corr phase: {}, AC phase {}, phase_inc: {}'.format(
        phase_inc, fp, ac_phase_inc))
    xc = cross_correlate_signal(preamble, x_preamble)
    ap = np.angle(xc[64])
    # print ap, (ap - init_phase) / (cfo * 2 * np.pi)
    residual = ap - init_phase
    res_phase = residual / (2 * fft_len)
    print(residual, res_phase, phase_inc / res_phase)
    # print fp, fp / (2 * np.pi)
    # print (ap - init_phase) - fp
    plt.plot(np.abs(xc) / 10000.)
    plt.plot(np.angle(xc))
    plt.show()
def generate_test_sync_samples(M, K, L, alpha, cp_len, ramp_len, snr_dB,
                               test_cfo):
    block_len = M * K
    data = get_random_qpsk(block_len,
                           seed=generate_seed('awesomepayloadblabla'))
    x = get_gfdm_frame(data, alpha, M, K, L, cp_len, ramp_len)
    preamble, x_preamble = generate_sync_symbol(
        get_random_qpsk(K, seed=generate_seed('awesome')),
        'rrc', alpha, K, L, cp_len, ramp_len)
    print('frame energy:', calculate_average_signal_energy(x),
          'preamble energy:', calculate_average_signal_energy(preamble))
    preamble *= np.sqrt(calculate_average_signal_energy(x) /
                        calculate_average_signal_energy(preamble))
    frame = np.concatenate((preamble, x))
    # simulate noise and frequency offset!
    frame = correct_frequency_offset(frame, test_cfo / (-2. * K))
    noise_variance = calculate_awgn_noise_variance(frame, snr_dB)
    s = get_complex_noise_vector(2 * block_len + len(frame), noise_variance)
    s[block_len:block_len + len(frame)] += frame
    return s, x_preamble
def generate_main(args):
    """
    generates text from trained model specified in args.
    main method for generate subcommand.
    """
    inference_model = retrieve_model(args)

    # create seed if not specified
    if args.seed is None:
        with open(args.text_path) as f:
            text = f.read()
        seed = generate_seed(text)
        logger.info("seed sequence generated from %s.", args.text_path)
    else:
        seed = args.seed

    return generate_text(inference_model, seed, args.length, args.top_n)
def __call__(self, trainer):
    duration_epoch = time.time() - self.time_epoch
    epoch = trainer.updater.epoch
    loss = trainer.observation["main/loss"].data
    logger.info("epoch: %s, duration: %ds, loss: %.6g.",
                epoch, duration_epoch, loss)
    # get rnn state
    model = trainer.updater.get_optimizer("main").target
    state = model.predictor.get_state()
    # generate text
    seed = generate_seed(self.text)
    generate_text(model, seed)
    # set rnn back to training state
    model.predictor.set_state(state)
    # reset time
    self.time_epoch = time.time()
def generate_main(args):
    """
    generates text from trained model specified in args.
    main method for generate subcommand.
    """
    # load learning model for config and weights
    model = load_model(args.checkpoint_path)
    # build inference model and transfer weights
    inference_model = build_inference_model(model)
    inference_model.set_weights(model.get_weights())
    logger.info("model loaded: %s.", args.checkpoint_path)

    # create seed if not specified
    if args.seed is None:
        with open(args.text_path) as f:
            text = f.read()
        seed = generate_seed(text)
        logger.info("seed sequence generated from %s.", args.text_path)
    else:
        seed = args.seed

    return generate_text(inference_model, seed, args.length, args.top_n)
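# The generate_main variants above read checkpoint_path, text_path, seed,
# length and top_n off the args namespace. A minimal argparse sketch of that
# contract (the flag names are assumptions, not the project's actual CLI):
import argparse

def _make_generate_args(argv=None):
    parser = argparse.ArgumentParser(description="generate subcommand (sketch)")
    parser.add_argument("--checkpoint-path", dest="checkpoint_path",
                        required=True)
    parser.add_argument("--text-path", dest="text_path")
    parser.add_argument("--seed", default=None)
    parser.add_argument("--length", type=int, default=1024)
    parser.add_argument("--top-n", dest="top_n", type=int, default=3)
    return parser.parse_args(argv)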
def show_login():
    page_name = 'login'
    if request.method.lower() == 'get':
        page_content = render_template("login.html")
        return render_page(page_content, "login")
    username = request.form.get("username") or ""
    password = request.form.get("password") or ""
    verification_code = request.form.get("verification_code") or ""
    if not (username and password and verification_code):
        page_content = render_template("login.html", message='Missing field')
        return render_page(page_content, page_name)
    if not utils.auth_user(username, password):
        page_content = render_template("login.html",
                                       message='Invalid credentials')
        return render_page(page_content, page_name)
    user = utils.check_username(username)
    seed = utils.generate_seed(username, user["user_ip"])
    totp_key = utils.get_totp_key(seed)
    totp = pyotp.TOTP(totp_key)
    if verification_code != totp.now():
        page_content = render_template("login.html",
                                       message='Invalid verification code')
        return render_page(page_content, page_name)
    # user/pass/totp all valid by now
    session_cookie = utils.make_cookie(app.config["COOKIE_SECRET"], username,
                                       request.remote_addr)
    response = app.make_response(redirect("/"))
    response.set_cookie('session', session_cookie)
    return response
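# Note on the TOTP comparison above: matching against totp.now() only accepts
# the code for the current 30-second window. pyotp's verify() can also
# tolerate clock skew between client and server; a sketch:
# totp = pyotp.TOTP(totp_key)
# if not totp.verify(verification_code, valid_window=1):
#     page_content = render_template("login.html",
#                                    message='Invalid verification code')
#     return render_page(page_content, page_name)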
def generate_main(args):
    """
    generates text from trained model specified in args.
    main method for generate subcommand.
    """
    # restore model
    inference_graph = tf.Graph()
    with inference_graph.as_default():
        inference_model = load_inference_model(args.checkpoint_path)

    # create seed if not specified
    if args.seed is None:
        with open(args.text_path) as f:
            text = f.read()
        seed = generate_seed(text)
        logger.info("seed sequence generated from %s.", args.text_path)
    else:
        seed = args.seed

    with tf.Session(graph=inference_graph) as infer_sess:
        # restore weights
        inference_model["saver"].restore(infer_sess, args.checkpoint_path)
        return generate_text(inference_model, infer_sess, seed,
                             args.length, args.top_n)
def train_main(args):
    """
    trains model specified in args.
    main method for train subcommand.
    """
    # load text
    with open(args.text_path) as f:
        text = f.read()
    logger.info("corpus length: %s.", len(text))

    # restore or build model
    if args.restore:
        logger.info("restoring model.")
        load_path = args.checkpoint_path if args.restore is True else args.restore
        model = Model.load(load_path)
    else:
        model = Model(vocab_size=VOCAB_SIZE,
                      embedding_size=args.embedding_size,
                      rnn_size=args.rnn_size,
                      num_layers=args.num_layers,
                      drop_rate=args.drop_rate)
        model.initialize(mx.init.Xavier())
    model.hybridize()

    # make checkpoint directory
    make_dirs(args.checkpoint_path)
    model.save(args.checkpoint_path)

    # loss function
    loss = gluon.loss.SoftmaxCrossEntropyLoss(batch_axis=1)
    # optimizer
    optimizer = mx.optimizer.Adam(learning_rate=args.learning_rate,
                                  clip_gradient=args.clip_norm)
    # trainer
    trainer = gluon.Trainer(model.collect_params(), optimizer)

    # training start
    num_batches = (len(text) - 1) // (args.batch_size * args.seq_len)
    data_iter = batch_generator(encode_text(text), args.batch_size,
                                args.seq_len)
    state = model.begin_state(args.batch_size)
    logger.info("start of training.")
    time_train = time.time()

    for i in range(args.num_epochs):
        epoch_losses = mx.nd.empty(num_batches)
        time_epoch = time.time()
        # training epoch
        for j in tqdm(range(num_batches),
                      desc="epoch {}/{}".format(i + 1, args.num_epochs)):
            # prepare inputs
            x, y = next(data_iter)
            x = mx.nd.array(x.T)
            y = mx.nd.array(y.T)
            # reset state variables to remove their history
            state = [arr.detach() for arr in state]

            with autograd.record():
                logits, state = model(x, state)
                # calculate loss
                L = loss(logits, y)
                L = F.mean(L)
            epoch_losses[j] = L.asscalar()
            # calculate gradient
            L.backward()
            # apply gradient update
            trainer.step(1)

        # logs
        duration_epoch = time.time() - time_epoch
        logger.info("epoch: %s, duration: %ds, loss: %.6g.",
                    i + 1, duration_epoch, F.mean(epoch_losses).asscalar())
        # checkpoint
        model.save_params(args.checkpoint_path)
        logger.info("model saved: %s.", args.checkpoint_path)
        # generate text
        seed = generate_seed(text)
        generate_text(model, seed)

    # training end
    duration_train = time.time() - time_train
    logger.info("end of training, duration: %ds.", duration_train)

    # generate text
    seed = generate_seed(text)
    generate_text(model, seed, 1024, 3)

    return model
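# batch_generator and encode_text are referenced by the training loops but
# not defined in this section. A minimal sketch of the contract they appear
# to satisfy (assumed: encoded is a 1-D int array; yields x/y pairs of shape
# (batch_size, seq_len) with y shifted one step ahead of x):
import numpy as np

def batch_generator_sketch(encoded, batch_size, seq_len):
    # trim the stream so it splits evenly into batch_size parallel rows
    num_batches = (len(encoded) - 1) // (batch_size * seq_len)
    rounded = num_batches * batch_size * seq_len
    x_stream = np.reshape(encoded[:rounded], (batch_size, -1))
    y_stream = np.reshape(encoded[1:rounded + 1], (batch_size, -1))
    while True:  # loop forever; callers draw a fixed number of batches
        for j in range(num_batches):
            sl = slice(j * seq_len, (j + 1) * seq_len)
            yield x_stream[:, sl], y_stream[:, sl]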
def train_main(args):
    """
    trains model specified in args.
    main method for train subcommand.
    """
    # load text
    with open(args.text_path) as f:
        text = f.read()
    logger.info("corpus length: %s.", len(text))

    # data iterator
    data_iter = DataIterator(text, args.batch_size, args.seq_len)

    # load or build model
    if args.restore:
        logger.info("restoring model.")
        load_path = args.checkpoint_path if args.restore is True else args.restore
        model = load_model(load_path)
    else:
        net = Network(vocab_size=VOCAB_SIZE,
                      embedding_size=args.embedding_size,
                      rnn_size=args.rnn_size,
                      num_layers=args.num_layers,
                      drop_rate=args.drop_rate)
        model = L.Classifier(net)

    # make checkpoint directory
    log_dir = make_dirs(args.checkpoint_path)
    with open("{}.json".format(args.checkpoint_path), "w") as f:
        json.dump(model.predictor.args, f, indent=2)
    chainer.serializers.save_npz(args.checkpoint_path, model)
    logger.info("model saved: %s.", args.checkpoint_path)

    # optimizer
    optimizer = chainer.optimizers.Adam(alpha=args.learning_rate)
    optimizer.setup(model)
    # clip gradient norm
    optimizer.add_hook(chainer.optimizer.GradientClipping(args.clip_norm))

    # trainer
    updater = BpttUpdater(data_iter, optimizer)
    trainer = chainer.training.Trainer(updater, (args.num_epochs, 'epoch'),
                                       out=log_dir)
    trainer.extend(extensions.snapshot_object(
        model, filename=os.path.basename(args.checkpoint_path)))
    trainer.extend(extensions.ProgressBar(update_interval=1))
    trainer.extend(extensions.LogReport())
    trainer.extend(extensions.PlotReport(y_keys=["main/loss"]))
    trainer.extend(LoggerExtension(text))

    # training start
    model.predictor.reset_state()
    logger.info("start of training.")
    time_train = time.time()
    trainer.run()

    # training end
    duration_train = time.time() - time_train
    logger.info("end of training, duration: %ds.", duration_train)

    # generate text
    seed = generate_seed(text)
    generate_text(model, seed, 1024, 3)

    return model
def train_main(args):
    """
    trains model specified in args.
    main method for train subcommand.
    """
    # load text
    with open(args.text_path) as f:
        text = f.read()
    logger.info("corpus length: %s.", len(text))

    # load or build model
    if args.restore:
        logger.info("restoring model.")
        load_path = args.checkpoint_path if args.restore is True else args.restore
        model = Model.load(load_path)
    else:
        model = Model(vocab_size=VOCAB_SIZE,
                      embedding_size=args.embedding_size,
                      rnn_size=args.rnn_size,
                      num_layers=args.num_layers,
                      drop_rate=args.drop_rate)

    # make checkpoint directory
    make_dirs(args.checkpoint_path)
    model.save(args.checkpoint_path)

    # loss function and optimizer
    criterion = nn.CrossEntropyLoss()
    optimizer = optim.Adam(model.parameters(), lr=args.learning_rate)

    # training start
    num_batches = (len(text) - 1) // (args.batch_size * args.seq_len)
    data_iter = batch_generator(encode_text(text), args.batch_size,
                                args.seq_len)
    state = model.init_state(args.batch_size)
    logger.info("start of training.")
    time_train = time.time()

    for i in range(args.num_epochs):
        epoch_losses = torch.Tensor(num_batches)
        time_epoch = time.time()
        # training epoch
        for j in tqdm(range(num_batches),
                      desc="epoch {}/{}".format(i + 1, args.num_epochs)):
            # prepare inputs
            x, y = next(data_iter)
            x = Variable(torch.from_numpy(x)).t()
            y = Variable(torch.from_numpy(y)).t().contiguous()
            # reset state variables to remove their history
            state = tuple([Variable(var.data) for var in state])
            # prepare model
            model.train()
            model.zero_grad()
            # calculate loss
            logits, state = model.forward(x, state)
            loss = criterion(logits, y.view(-1))
            epoch_losses[j] = loss.data[0]
            # calculate gradients
            loss.backward()
            # clip gradient norm
            nn.utils.clip_grad_norm(model.parameters(), args.clip_norm)
            # apply gradient update
            optimizer.step()

        # logs
        duration_epoch = time.time() - time_epoch
        logger.info("epoch: %s, duration: %ds, loss: %.6g.",
                    i + 1, duration_epoch, epoch_losses.mean())
        # checkpoint
        model.save(args.checkpoint_path)
        # generate text
        seed = generate_seed(text)
        generate_text(model, seed)

    # training end
    duration_train = time.time() - time_train
    logger.info("end of training, duration: %ds.", duration_train)

    # generate text
    seed = generate_seed(text)
    generate_text(model, seed, 1024, 3)

    return model
import hashlib

from utils import generate_seed, get_totp_key

username = "******"
password_hash = "22e59a7a2792b25684a43d5f5229b2b5caf7abf8fa9f186249f35cae53387fa3"
ip_addr = "64.124.192.210"

# the TOTP seed is derived deterministically from username and IP
seed = generate_seed(username, ip_addr)
totp = get_totp_key(seed)
print("[*] Found TOTP KEY: %s" % totp)

with open('/usr/share/john/password.lst', 'r') as fd:
    passwords = fd.read().split()

# brute-force the SHA-256(username + password) hash against the wordlist
password = None
for p in passwords:
    p_hash = hashlib.sha256((username + p).encode()).hexdigest()
    if p_hash == password_hash:
        password = p
        print("[*] Found password: %s" % p)
        break

if password and totp:
    flag = hashlib.md5((totp + password).encode()).hexdigest()
    print("[+] Found flag: %s" % flag)
def train_main(args):
    """
    trains model specified in args.
    main method for train subcommand.
    """
    # load text
    with open(args.text_path) as f:
        text = f.read()
    logger.info("corpus length: %s.", len(text))

    # restore or build model
    if args.restore:
        load_path = args.checkpoint_path if args.restore is True else args.restore
        with open("{}.json".format(args.checkpoint_path)) as f:
            model_args = json.load(f)
        logger.info("model restored: %s.", load_path)
    else:
        load_path = None
        model_args = {"batch_size": args.batch_size,
                      "vocab_size": VOCAB_SIZE,
                      "embedding_size": args.embedding_size,
                      "rnn_size": args.rnn_size,
                      "num_layers": args.num_layers,
                      "p_keep": 1 - args.drop_rate,
                      "learning_rate": args.learning_rate,
                      "clip_norm": args.clip_norm}

    # build train model
    train_graph = tf.Graph()
    with train_graph.as_default():
        train_model = build_model(**model_args)

    with tf.Session(graph=train_graph) as train_sess:
        # restore or initialise model weights
        if load_path is not None:
            train_model["saver"].restore(train_sess, load_path)
            logger.info("model weights restored: %s.", load_path)
        else:
            train_sess.run(train_model["init_op"])

        # clear checkpoint directory
        log_dir = make_dirs(args.checkpoint_path, empty=True)
        # save model
        with open("{}.json".format(args.checkpoint_path), "w") as f:
            json.dump(train_model["args"], f, indent=2)
        checkpoint_path = train_model["saver"].save(train_sess,
                                                    args.checkpoint_path)
        logger.info("model saved: %s.", checkpoint_path)
        # tensorboard logger
        summary_writer = tf.summary.FileWriter(log_dir, train_sess.graph)
        # embeddings visualisation
        config = projector.ProjectorConfig()
        embedding = config.embeddings.add()
        embedding.tensor_name = "EmbedSequence/embeddings"
        embedding.metadata_path = os.path.abspath(
            os.path.join("data", "id2char.tsv"))
        projector.visualize_embeddings(summary_writer, config)
        logger.info("tensorboard set up.")

        # build infer model
        inference_graph = tf.Graph()
        with inference_graph.as_default():
            inference_model = load_inference_model(args.checkpoint_path)

        # training start
        num_batches = (len(text) - 1) // (args.batch_size * args.seq_len)
        data_iter = batch_generator(encode_text(text), args.batch_size,
                                    args.seq_len)
        fetches = [train_model["train_op"], train_model["output_state"],
                   train_model["loss"], train_model["summary"]]
        state = train_sess.run(train_model["input_state"])
        logger.info("start of training.")
        time_train = time.time()

        for i in range(args.num_epochs):
            epoch_losses = np.empty(num_batches)
            time_epoch = time.time()
            # training epoch
            for j in tqdm(range(num_batches),
                          desc="epoch {}/{}".format(i + 1, args.num_epochs)):
                x, y = next(data_iter)
                feed_dict = {train_model["X"]: x,
                             train_model["Y"]: y,
                             train_model["input_state"]: state}
                _, state, loss, summary_log = train_sess.run(fetches,
                                                             feed_dict)
                epoch_losses[j] = loss

            # logs
            duration_epoch = time.time() - time_epoch
            logger.info("epoch: %s, duration: %ds, loss: %.6g.",
                        i + 1, duration_epoch, epoch_losses.mean())
            # tensorboard logs
            summary_writer.add_summary(summary_log, i + 1)
            summary_writer.flush()
            # checkpoint
            checkpoint_path = train_model["saver"].save(train_sess,
                                                        args.checkpoint_path)
            logger.info("model saved: %s.", checkpoint_path)
            # generate text
            seed = generate_seed(text)
            with tf.Session(graph=inference_graph) as infer_sess:
                # restore weights
                inference_model["saver"].restore(infer_sess, checkpoint_path)
                generate_text(inference_model, infer_sess, seed)

        # training end
        duration_train = time.time() - time_train
        logger.info("end of training, duration: %ds.", duration_train)

        # generate text
        seed = generate_seed(text)
        with tf.Session(graph=inference_graph) as infer_sess:
            # restore weights
            inference_model["saver"].restore(infer_sess, checkpoint_path)
            generate_text(inference_model, infer_sess, seed, 1024, 3)

    return train_model