def main(args):
    """Train and evaluate a character-based CTC ASR model on WSJ data
    pre-processed by the ESPnet toolkit.

    Stages the data tree under a temp root for fast local reads, sets up
    file logging under ``args.model_dir``, seeds all RNG sources for
    reproducibility, then runs ``train()`` (unless ``--eval_only``)
    followed by ``evaluate()``.

    Args:
        args: parsed command-line namespace; fields used here include
            data_root, temp_root, model_dir, log_file, seed, determ,
            eval_only and cleanup.
    """
    # Relative paths (under the data/temp roots) to the ESPnet dumps.
    jsons = {
        'train': 'dump/train_si284/deltafalse/data.json',
        'dev': 'dump/test_dev93/deltafalse/data.json',
        'test': 'dump/test_eval92/deltafalse/data.json',
    }
    spk2genders = {
        'train': 'train_si284/spk2gender',
        'dev': 'test_dev93/spk2gender',
        'test': 'test_eval92/spk2gender',
    }
    # Stage the data locally — presumably faster than reading over NFS.
    utils.safe_copytree(args.data_root, args.temp_root)
    # exist_ok avoids the check-then-create race of the original
    # `if not os.path.exists(...): os.makedirs(...)` pattern.
    os.makedirs(args.model_dir, exist_ok=True)
    logging.basicConfig(filename=os.path.join(args.model_dir, args.log_file),
                        filemode='a', level=logging.INFO)
    # Seed every RNG source so runs are reproducible.
    torch.manual_seed(args.seed)
    torch.backends.cudnn.deterministic = args.determ
    np.random.seed(args.seed)
    if not args.eval_only:
        train(args, jsons, spk2genders)
    evaluate(args, jsons, spk2genders)
    if args.cleanup:
        utils.safe_rmtree(args.temp_root)
def main(args):
    """Training and evaluation script for character-based CTC ASR on the WSJ
    dataset, pre-processed by the ESPnet toolkit.

    Args:
        args: parsed command-line namespace; fields used here include
            data_root, temp_root, model_dir, log_file, seed, nondeterm,
            eval_only, test, cpu and cleanup.
    """
    jsons = {
        'train': 'dump/train_si284/deltafalse/data.json',
        'dev': 'dump/test_dev93/deltafalse/data.json',
        'test': 'dump/test_eval92/deltafalse/data.json',
    }
    # If the temp dir already contains the train json, assume the staged
    # copy is complete and skip the (slow) copy from NFS.
    if not os.path.exists(os.path.join(args.temp_root, jsons['train'])):
        utils.safe_copytree(args.data_root, args.temp_root)
    # Bug fix: the original computed os.path.join(args.model_dir, ...)
    # BEFORE its own `args.model_dir is not None` check, so a None
    # model_dir crashed in the join the guard was meant to handle.
    # The whole logging setup now lives inside the guard.
    if args.model_dir is not None:
        log_file = os.path.join(args.model_dir, args.log_file)
        # If model_dir doesn't yet contain the log file, create the dir.
        if not os.path.exists(log_file):
            utils.safe_makedirs(args.model_dir)
        logging.basicConfig(filename=log_file, filemode='a',
                            level=logging.INFO)
    # Seed every RNG source so runs are reproducible.
    torch.manual_seed(args.seed)
    torch.backends.cudnn.deterministic = not args.nondeterm
    np.random.seed(args.seed)
    if not args.eval_only:
        # Persist the full arg set so later eval-only runs can restore it.
        utils.safe_json_dump(vars(args),
                             os.path.join(args.model_dir, 'args.json'))
        epoch_stats = train(args, jsons)
        utils.safe_json_dump(epoch_stats,
                             os.path.join(args.model_dir, 'epoch_stats.json'))
    if args.eval_only:
        # Restore the training-time args from disk, but keep this run's
        # machine- and eval-specific overrides.
        data_root, temp_root = args.data_root, args.temp_root
        test, cpu, seed, cleanup = args.test, args.cpu, args.seed, args.cleanup
        with open(os.path.join(args.model_dir, 'args.json'), 'r') as f:
            json_dict = json.load(f)
        args = argparse.Namespace(**json_dict)
        args.data_root, args.temp_root = data_root, temp_root
        args.test, args.cpu, args.seed, args.cleanup = test, cpu, seed, cleanup
    evaluate(args, jsons)
    if args.cleanup:
        utils.safe_rmtree(args.temp_root)
def generate_batch(batch, host='http://112.124.117.97',
                   zip_dir='/usr/share/nginx/html'):
    """Create ``batch.count`` Records for *batch*, render one QR-code PNG
    per record (encoding the record's verification URL), and pack all of
    them into ``<zip_dir>/<batch.bid>.zip``.

    ``host`` and ``zip_dir`` were hard-coded constants; they are now
    parameters whose defaults are the original values, so existing
    callers are unaffected.
    """
    import tempfile
    to_dir = tempfile.mkdtemp()
    try:
        # One record (and one QR image) per unit in the batch, 1-based.
        for i in range(1, int(batch.count) + 1):
            record = Record(batch=batch, index=i,
                            serial_num=utils.generate_serial_num(),
                            left_time=batch.verify_time)
            record.save()
            url = urlparse.urljoin(host, record.serial_num)
            filepath = os.path.join(to_dir, record.serial_num + '.png')
            utils.generate_qrcode(url, filepath)
        zip_filepath = os.path.join(zip_dir, batch.bid + '.zip')
        utils.zipdir(to_dir, zip_filepath)
    finally:
        # Bug fix: the original removed the temp dir only on the success
        # path, leaking a directory whenever record creation, QR rendering
        # or zipping raised. (The debug `print` of the temp path is gone.)
        utils.safe_rmtree(to_dir)
if __name__ == '__main__':
    # Usage: python <script> <batch_id> <added_count>
    # Appends <added_count> new Records to an existing batch and writes a
    # zip of their QR-code PNGs to /tmp/qrcode/<bid>.zip.
    connect('paibei')
    host = 'http://112.124.117.97'
    zip_dir = '/tmp/qrcode'
    import tempfile
    to_dir = tempfile.mkdtemp()
    print(to_dir)  # surface the temp path for debugging
    batch_id = sys.argv[1]
    # Bug fix: sys.argv values are strings; the original added the raw
    # string to an int in the range bounds, raising TypeError.
    added_count = int(sys.argv[2])
    batch = get_batch(batch_id)
    # Continue numbering after the batch's existing records.
    # int() matches generate_batch's handling of batch.count.
    current_index = int(batch.count)
    try:
        for i in range(current_index + 1, current_index + added_count + 1):
            record = Record(batch=batch, index=i,
                            serial_num=utils.generate_serial_num(),
                            left_time=batch.verify_time)
            record.save()
            # Bug fix: encode each record's OWN serial_num in its QR URL,
            # consistent with generate_batch(); the original baked the
            # same batch.bid URL into every image even though each PNG is
            # named after its record's serial.
            url = urlparse.urljoin(host, record.serial_num)
            filepath = os.path.join(to_dir, record.serial_num + '.png')
            utils.generate_qrcode(url, filepath)
        zip_filepath = os.path.join(zip_dir, batch.bid + '.zip')
        utils.zipdir(to_dir, zip_filepath)
    finally:
        # Always clean up the temp dir, even if a step above raised.
        utils.safe_rmtree(to_dir)