mask[-1] = 1

        # train_loader a generator: (data, label)
        (data, label) = next(train_loader)

        # Return DataTuple(!) and an empty (aux) tuple.
        return DataTuple(data, label), MaskAuxTuple(mask.type(torch.uint8))


if __name__ == "__main__":
    """ Tests sequence generator - generates and displays a random sample"""

    # "Loaded parameters".
    from utils.param_interface import ParamInterface 
    params = ParamInterface()
    # Defaults a configuration file would normally supply:
    # MNIST train split (indices 0..54999) served in batches of 3.
    params.add_default_params({'batch_size': 3, 'start_index': 0, 'stop_index': 54999,
              'use_train_data': True, 'mnist_folder': '~/data/mnist'})

    # Create problem object.
    problem = SequentialPixelMNIST(params)
    # Get generator
    generator = problem.return_generator()
    # Get batch.
    num_rows = 28
    num_columns = 28
    sample_num = 0
    data_tuple, _ = next(generator)
    x, y = data_tuple

    # x is the batch of inputs from the generator; print its dimensions.
    # NOTE(review): num_rows/num_columns/sample_num appear unused here —
    # presumably consumed by display code that is cut off below; confirm.
    print(x.size())

    # Display single sample (0) from batch.
Exemplo n.º 2
0
              :num_bits] = query_matrix[:self.NUM_QUESTIONS, :num_bits]

        return Q


if __name__ == "__main__":
    """ Tests Shape-Color-Query - generates and displays a sample"""

    # "Loaded parameters".
    from utils.param_interface import ParamInterface 
    params = ParamInterface()
    # Defaults a configuration file would normally supply; 'regenerate'
    # forces the dataset file to be rebuilt rather than reused.
    params.add_default_params({
        'batch_size': 10,
        'data_folder': '~/data/shape-color-query/',
        'data_filename': 'training.hy',
        'shuffle': True,
        "regenerate": True,
        'use_train_data': True,
        'dataset_size': 100,
        'img_size': 224})

    # Configure logger.
    # NOTE(review): `logging` and `logger` must be defined at module level —
    # not visible in this chunk; verify against the full file.
    logging.basicConfig(level=logging.DEBUG)
    logger.debug("params: {}".format(params))

    # Create problem object.
    problem = ShapeColorQuery(params)

    # Get generator
    generator = problem.return_generator()
Exemplo n.º 3
0
    # "Loaded parameters".
    from utils.param_interface import ParamInterface
    params = ParamInterface()
    # Defaults a configuration file would normally supply for the model
    # under test (controller, encoder/solver interfaces, memory).
    params.add_default_params({
        'num_control_bits': 3,
        'num_data_bits': 8,  # input and output size
        'encoding_bit': 0,
        'solving_bit': 1,
        # controller parameters
        'controller': {
            'name': 'rnn',
            'hidden_state_size': 20,
            'num_layers': 1,
            'non_linearity': 'sigmoid'
        },
        'mae_interface': {
            'shift_size': 3
        },  # encoder interface parameters
        'mas_interface': {
            'shift_size': 3
        },  # solver interface parameters
        # memory parameters
        'memory': {
            'num_addresses': -1,  # presumably -1 means dynamic/unbounded addressing — confirm
            'num_content_bits': 11
        },
        'visualization_mode': 2
    })
    # NOTE(review): `logger` is defined outside this chunk; verify it exists.
    logger.debug("params: {}".format(params))

    # Model input width: control bits concatenated with data bits.
    input_size = params["num_control_bits"] + params["num_data_bits"]
Exemplo n.º 4
0
    # "Loaded parameters".
    from utils.param_interface import ParamInterface
    params = ParamInterface()
    # Defaults a configuration file would normally supply for the model
    # under test (ffgru controller, read-head interface, external memory).
    params.add_default_params({
        'num_control_bits': 2,
        'num_data_bits': 8,  # input and output size
        # controller parameters
        'controller': {
            'name': 'ffgru',
            'hidden_state_size': 5,
            'num_layers': 1,
            'non_linearity': 'none',
            'ff_output_size': 5
        },
        # interface parameters
        'interface': {
            'num_read_heads': 2,
            'shift_size': 3
        },
        # memory parameters
        'memory': {
            'num_addresses': 4,
            'num_content_bits': 7
        },
        'visualization_mode': 2
    })
    # NOTE(review): `logger` is defined outside this chunk; verify it exists.
    logger.debug("params: {}".format(params))

    # Model I/O widths: input is control bits plus data bits; output is data bits only.
    input_size = params["num_control_bits"] + params["num_data_bits"]
    output_size = params["num_data_bits"]
Exemplo n.º 5
0
        # Return DataTuple(!) and an empty (aux) tuple.
        return DataTuple(data_padded, label), LabelAuxTuple(class_names)


if __name__ == "__main__":
    """ Tests sequence generator - generates and displays a random sample"""

    from utils.param_interface import ParamInterface

    # Build the parameter registry and seed it with the defaults that a
    # configuration file would normally provide.
    test_params = ParamInterface()
    test_params.add_default_params({
        'batch_size': 2,
        'start_index': 0,
        'stop_index': 54999,
        'use_train_data': True,
        'mnist_folder': '~/data/mnist',
        'padding': [4, 4, 3, 3],
        'up_scaling': False
    })

    # Instantiate the problem and draw one batch from its generator.
    mnist_problem = MNIST(test_params)
    batch_generator = mnist_problem.return_generator()
    data_tuple, aux_tuple = next(batch_generator)

    # Visualize sample 0 of the freshly drawn batch.
    mnist_problem.show_sample(data_tuple, aux_tuple, 0)
Exemplo n.º 6
0
        return DataTuple(data_padded, label), LabelAuxTuple(class_names)


if __name__ == "__main__":
    """ Tests sequence generator - generates and displays a random sample"""
    # Fix both RNG sources so the drawn sample is reproducible.
    np.random.seed(0)
    torch.manual_seed(0)

    from utils.param_interface import ParamInterface

    # Build the parameter registry and seed it with the defaults that a
    # configuration file would normally provide.
    test_params = ParamInterface()
    test_params.add_default_params({
        'batch_size': 2,
        'start_index': 0,
        'stop_index': 40000,
        'use_train_data': True,
        'folder': '~/data/cifar10',
        'padding': [0, 0, 0, 0],
        'up_scaling': True
    })

    # Instantiate the problem and draw one batch from its generator.
    cifar_problem = CIFAR10(test_params)
    batch_generator = cifar_problem.return_generator()
    data_tuple, aux_tuple = next(batch_generator)

    # Visualize sample 0 of the freshly drawn batch.
    cifar_problem.show_sample(data_tuple, aux_tuple, 0)