Example #1
def open_database(db_file_path):
    print(
        'Establishing a connection with a database at {}'.format(db_file_path))
    file_util.make_parent_dirs(db_file_path)
    connection = sqlite3.connect(db_file_path)
    cursor = connection.cursor()
    return connection, cursor
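A minimal usage sketch (the database path and table are hypothetical), assuming file_util.make_parent_dirs creates any missing parent directories:
# Hypothetical call: open or create the SQLite database, then run a query
connection, cursor = open_database('./resources/cache/results.db')
cursor.execute('CREATE TABLE IF NOT EXISTS results (name TEXT, score REAL)')
connection.commit()
connection.close()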
Example #2
def save_ckpt(model, optimizer, lr_scheduler, best_value, config, args, output_file_path):
    file_util.make_parent_dirs(output_file_path)
    model_state_dict = (model.module.state_dict()
                        if isinstance(model, nn.parallel.DistributedDataParallel)
                        else model.state_dict())
    misc_util.save_on_master({'model': model_state_dict,
                              'optimizer': optimizer.state_dict(),
                              'best_value': best_value,
                              'lr_scheduler': lr_scheduler.state_dict(),
                              'config': config,
                              'args': args},
                             output_file_path)
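A hedged resume sketch for a checkpoint written by the function above, assuming model, optimizer, and lr_scheduler have already been rebuilt with matching configurations:
# Load the checkpoint on CPU and restore the training state
ckpt = torch.load(output_file_path, map_location='cpu')
model.load_state_dict(ckpt['model'])
optimizer.load_state_dict(ckpt['optimizer'])
lr_scheduler.load_state_dict(ckpt['lr_scheduler'])
best_value = ckpt['best_value']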
Example #3
def save_ckpt(model, acc, epoch, ckpt_file_path, model_type):
    print('Saving..')
    state = {
        'type': model_type,
        'model': model.state_dict(),
        'acc': acc,
        'epoch': epoch,
    }
    file_util.make_parent_dirs(ckpt_file_path)
    torch.save(state, ckpt_file_path)
Example #4
def analyze_with_mean_inputs(model, input_shape, data_loader, device,
                             split_name, method, dim, model_type,
                             output_file_path):
    if output_file_path is None:
        output_file_path = './{}_with_mean_inputs_by_{}.eps'.format(
            model_type, '{}_{}-dim'.format(method, dim))

    file_util.make_parent_dirs(output_file_path)
    model = model.module if isinstance(model, DataParallel) else model
    input_batch = torch.rand(input_shape).unsqueeze(0).to(device)
    module_wrap_util.wrap_decomposable_modules(model,
                                               RepresentationWrapper,
                                               input_batch,
                                               method=method,
                                               dim=dim)
    if device.type == 'cuda':
        model = DataParallel(model)

    model.eval()
    accumulated_tensor_dict = dict()
    with torch.no_grad():
        print('Computing mean inputs ...')
        for batch_idx, (sample_batch, targets) in enumerate(data_loader):
            # Accumulate a per-class sum of inputs and a per-class sample count
            for x, y in zip(sample_batch, targets):
                class_label = y.item()
                if class_label not in accumulated_tensor_dict:
                    accumulated_tensor_dict[class_label] = [x, 1]
                else:
                    accumulated_tensor_dict[class_label][0] += x
                    accumulated_tensor_dict[class_label][1] += 1

        # Average the accumulated inputs to obtain one mean input per class
        mean_input_list = list()
        for y, (x, num_samples) in accumulated_tensor_dict.items():
            mean_x = x / num_samples
            mean_input_list.append(mean_x)

        mean_batch = torch.stack(mean_input_list)
        print('Analyzing layer-wise discriminability ...')
        # Forward pass; the wrapped modules record their transformed outputs for extraction below
        preds = model(mean_batch)

    transformed_output_list = list()
    name_list = list()
    extract_transformed_outputs(model, transformed_output_list, name_list)
    xs = list(range(len(name_list)))
    discriminabilities = assess_discriminabilities(transformed_output_list)
    plt.plot(xs, discriminabilities, label=method)
    plt.xticks(xs, name_list, rotation=90)
    plt.xlabel('Layer')
    plt.ylabel('Discriminability')
    plt.title(split_name)
    plt.legend()
    plt.savefig(output_file_path)
    plt.show()
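A hypothetical call sketch; the input shape, decomposition method, model type, and output path below are illustrative assumptions, not values taken from the source project:
analyze_with_mean_inputs(model, input_shape=(3, 32, 32), data_loader=val_loader,
                         device=torch.device('cuda'), split_name='valid',
                         method='svd', dim=2, model_type='resnet',
                         output_file_path='./plots/resnet_svd_2-dim.eps')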
Example #5
def save_ckpt(autoencoder, epoch, best_avg_loss, ckpt_file_path, ae_type):
    print('Saving..')
    module = autoencoder.module if isinstance(autoencoder,
                                              (DistributedDataParallel,
                                               DataParallel)) else autoencoder
    state = {
        'type': ae_type,
        'model': module.state_dict(),
        'epoch': epoch + 1,
        'best_value': best_avg_loss
    }
    file_util.make_parent_dirs(ckpt_file_path)
    torch.save(state, ckpt_file_path)
Example #6
def save_ckpt(student_model, epoch, best_valid_value, ckpt_file_path,
              teacher_model_type):
    print('Saving..')
    module = (student_model.module
              if isinstance(student_model, (DataParallel, DistributedDataParallel))
              else student_model)
    state = {
        'type': teacher_model_type,
        'model': module.state_dict(),
        'epoch': epoch + 1,
        'best_valid_value': best_valid_value,
        'student': True
    }
    file_util.make_parent_dirs(ckpt_file_path)
    torch.save(state, ckpt_file_path)
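A hedged sketch of reloading the student checkpoint saved above; treating the 'student' flag as a marker that distinguishes student from teacher checkpoints is an assumption:
# Restore the student model and training metadata from the checkpoint
ckpt = torch.load(ckpt_file_path, map_location='cpu')
if ckpt.get('student', False):
    student_model.load_state_dict(ckpt['model'])
    start_epoch = ckpt['epoch']
    best_valid_value = ckpt['best_valid_value']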
Example #7
def write_converted_dataset(data_list,
                            rgb_only,
                            output_file_path,
                            delimiter='\t'):
    file_util.make_parent_dirs(output_file_path)
    with open(output_file_path, 'w') as fp:
        for label_name, image_file_paths in data_list:
            for image_file_path in image_file_paths:
                if rgb_only:
                    img = Image.open(image_file_path)
                    if img.mode != 'RGB':
                        continue
                fp.write('{}{}{}\n'.format(image_file_path, delimiter,
                                           label_name))
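A minimal sketch of the expected input, assuming data_list pairs each label name with a list of image file paths (the labels and paths are hypothetical):
data_list = [
    ('cat', ['./images/cat/0001.jpg', './images/cat/0002.jpg']),
    ('dog', ['./images/dog/0001.jpg'])
]
write_converted_dataset(data_list, rgb_only=True,
                        output_file_path='./dataset/train.tsv')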