def test(epoch):
    global best_acc
    net.eval()
    test_loss = 0
    correct = 0
    total = 0
    for batch_idx, (inputs, targets) in enumerate(testloader):
        if use_cuda:
            inputs, targets = inputs.cuda(), targets.cuda()
        # volatile=True is the pre-0.4 PyTorch way of disabling autograd during evaluation.
        inputs, targets = Variable(inputs, volatile=True), Variable(targets)
        outputs = net(inputs)
        loss = criterion(outputs, targets)

        test_loss += loss.data[0]
        _, predicted = torch.max(outputs.data, 1)
        total += targets.size(0)
        correct += predicted.eq(targets.data).cpu().sum()

        progress_bar(batch_idx, len(testloader),
                     'Loss: %.3f | Acc: %.3f%% (%d/%d)'
                     % (test_loss / (batch_idx + 1), 100. * correct / total,
                        correct, total))

    test_accuracy_list[epoch] = 100. * correct / total

    # Checkpoint the model and dump the per-epoch records whenever test accuracy improves.
    acc = 100. * correct / total
    if acc > best_acc:
        dump_acc_record(acc, net, use_cuda, epoch, args)
        dump_record(train_accuracy_list, test_accuracy_list,
                    learning_rate_list, loss_function_list, args)
        best_acc = acc
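# A minimal sketch of what the dump_acc_record helper called above might do,
# assuming it persists the model weights together with the accuracy and the
# epoch of the new best result. The checkpoint layout and path are assumptions
# for illustration, not the repo's actual implementation.
import os
import torch


def dump_acc_record(acc, net, use_cuda, epoch, args):
    state = {
        # unwrap DataParallel if the net was wrapped for multi-GPU training
        'net': net.module if use_cuda and hasattr(net, 'module') else net,
        'acc': acc,
        'epoch': epoch,
        'args': vars(args),
    }
    if not os.path.isdir('checkpoint'):
        os.mkdir('checkpoint')
    torch.save(state, os.path.join('checkpoint', 'ckpt.t7'))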
parser.add_argument('--incorrect_penalty', default=0, type=float,
                    help='incorrect_penalty')
parser.add_argument('--normalize', action='store_true',
                    help='normalize the rewards')
parser.add_argument('--epochs_to_train', default=300, type=int)
args = parser.parse_args()

use_cuda = torch.cuda.is_available()
trainloader, testloader, _ = transform_dataset(batch_size=400)

# Model
storedNet, trainList = _initilization_(args, use_cuda)
(net, best_acc, start_epoch), (train_accuracy_list, test_accuracy_list,
                               learning_rate_list, loss_function_list) = storedNet, trainList

criterion = parser_loss_function(args=args)
optimizer = optim.SGD(net.parameters(), lr=args.lr, momentum=0.9, weight_decay=5e-4)

# Training
for epoch in range(start_epoch, start_epoch + args.epochs_to_train):
    train(epoch)
    test(epoch)

dump_record(train_accuracy_list, test_accuracy_list, learning_rate_list,
            loss_function_list, args)
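# A hedged sketch of the shape of data _initilization_ is expected to return,
# based only on the tuple unpacking above. The model choice (a torchvision
# ResNet-18 stand-in) and the zero-filled record lists are assumptions, not the
# repo's actual initialisation or checkpoint-resume logic.
import torch
import torchvision


def _initilization_sketch(args, use_cuda):
    net = torchvision.models.resnet18(num_classes=10)  # assumed stand-in model
    if use_cuda:
        net = torch.nn.DataParallel(net).cuda()
    best_acc, start_epoch = 0.0, 0
    n = args.epochs_to_train
    # per-epoch records, written by index inside train()/test()
    train_accuracy_list = [0.0] * n
    test_accuracy_list = [0.0] * n
    learning_rate_list = [0.0] * n
    loss_function_list = [0.0] * n
    return (net, best_acc, start_epoch), (train_accuracy_list, test_accuracy_list,
                                          learning_rate_list, loss_function_list)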
    u16 key_sz;
    char key[];
};
'''

filename = sys.argv[1]
fd = open(filename, 'rb')
data = fd.read()

p = 0
p += utils.dump_record(
    'index header',
    ['magic', 'last_record_logno', 'last_record_offset', 'keys_stored', 'checksum'],
    "<IiQQI",
    data, p,
    magic=0x43211234,
)

i = itertools.count(1)
while p < len(data):
    p += utils.dump_record(
        'item %i' % i.next(),
        ['magic', 'checksum', 'logno', 'value_offset', 'key_sz', 'r', 'value_sz'],
        "<IIiQHHI",
        data, p,
        indent=4,
        magic=0x12344321,
    )
struct ydb_value_record{
    u32 checksum;
};
'''

filename = sys.argv[1]
fd = open(filename, 'rb')
data = fd.read()

p = 0
i = itertools.count(1)
while p < len(data):
    p += utils.dump_record(
        'record',
        ['magic', 'checksum', 'flags', 'key_sz', 'value_sz', 'key'],
        "<IIHHI",
        data, p,
        magic=0x7DB5EC5D,
    )
    p += utils.dump_record(
        'item %i' % i.next(),
        ['magic', 'checksum', 'logno', 'value_offset', 'key_sz', 'r', 'value_sz'],
        "<IIiQHHI",
        data, p,
        indent=4,
        magic=0x12344321,
    )
    u64 value_offset;
    u32 value_sz;
    u16 key_sz;
    char key[];
};
'''

filename = sys.argv[1]
fd = open(filename, 'rb')
data = fd.read()

p = 0
p += utils.dump_record(
    'index header',
    ['magic', 'last_record_logno', 'last_record_offset', 'keys_stored', 'checksum'],
    "<IiQQI",
    data, p,
    magic=0x43211234,
)

i = itertools.count(1)
while p < len(data):
    p += utils.dump_record(
        'item %i' % i.next(),
        ['magic', 'checksum', 'logno', 'value_offset', 'key_sz', 'r', 'value_sz'],
        "<IIiQHHI",
        data, p,
        indent=4,
        key_len='key_sz',
        magic=0x12344321,
    )
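# A minimal sketch of how a utils.dump_record helper with the call signature
# used above could work: unpack one fixed-size record with struct, verify its
# magic, print the fields, optionally read a trailing variable-length key whose
# length is held in the field named by key_len, and return the bytes consumed.
# This is an assumption for illustration, not the actual ydb utils code
# (for example, it skips checksum validation and ignores field names that have
# no packed value).
import struct


def dump_record(name, fields, fmt, data, offset, indent=0, magic=None, key_len=None):
    values = struct.unpack_from(fmt, data, offset)
    record = dict(zip(fields, values))
    if magic is not None and record.get('magic') != magic:
        raise ValueError('%s: bad magic 0x%08x at offset %i'
                         % (name, record.get('magic', 0), offset))
    pad = ' ' * indent
    print('%s%s:' % (pad, name))
    for field, value in zip(fields, values):
        print('%s  %-20s %r' % (pad, field, value))
    consumed = struct.calcsize(fmt)
    if key_len is not None:
        # variable-length key follows the fixed part of the record
        klen = record[key_len]
        key = data[offset + consumed:offset + consumed + klen]
        print('%s  %-20s %r' % (pad, 'key', key))
        consumed += klen
    return consumed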