torch.CharTensor) * seq_length
        data_dict['num_subsequences'] = torch.ones([batch_size,
                                                    1]).type(torch.CharTensor)
        return data_dict


if __name__ == "__main__":
    """ Tests sequence generator - generates and displays a random sample"""

    # "Loaded parameters".
    from miprometheus.utils.param_interface import ParamInterface

    params = ParamInterface()
    params.add_config_params({  #'control_bits': 4,
        #'data_bits': 8,
        'min_sequence_length': 1,
        'max_sequence_length': 10
    })
    batch_size = 64

    # Create problem object.
    repeatserialrecallcl = RepeatSerialRecallCommandLines(params)

    # get a sample
    sample = repeatserialrecallcl[0]
    print(repr(sample))
    print('__getitem__ works.')

    # wrap DataLoader on top
    from torch.utils.data import DataLoader
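    # Hedged continuation (not part of the scraped snippet): the example is cut off
    # right after the DataLoader import, so this sketch shows the presumable next step -
    # wrap the problem with its collate_fn and pull one batch. The 'sequences'/'targets'
    # keys are assumed from the DataDict fields used elsewhere in these examples.
    dataloader = DataLoader(dataset=repeatserialrecallcl,
                            collate_fn=repeatserialrecallcl.collate_fn,
                            batch_size=batch_size)
    batch = next(iter(dataloader))
    print('Batch sequences shape: {}'.format(batch['sequences'].shape))
    print('Batch targets shape: {}'.format(batch['targets'].shape))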
Example #2
            torch.CharTensor) * seq_length
        data_dict['num_subsequences'] = torch.ones([batch_size,
                                                    1]).type(torch.CharTensor)
        return data_dict


if __name__ == "__main__":
    """ Tests sequence generator - generates and displays a random sample"""

    # "Loaded parameters".
    from miprometheus.utils.param_interface import ParamInterface

    params = ParamInterface()
    params.add_config_params({  #'control_bits': 4,
        #'data_bits': 8,
        # 'randomize_control_lines': False,
        'min_sequence_length': 1,
        'max_sequence_length': 10
    })
    batch_size = 64

    # Create problem object.
    repeatreverserecallcl = RepeatReverseRecallCommandLines(params)

    # get a sample
    sample = repeatreverserecallcl[0]
    print(repr(sample))
    print('__getitem__ works.')

    # wrap DataLoader on top
    from torch.utils.data.dataloader import DataLoader
Example #3
    from miprometheus.utils.app_state import AppState
    from miprometheus.utils.param_interface import ParamInterface
    from torch.utils.data import DataLoader
    app_state = AppState()

    from miprometheus.problems import CLEVR
    problem_params = ParamInterface()
    problem_params.add_config_params({
        'settings': {
            'data_folder': '~/Downloads/CLEVR_v1.0',
            'set': 'train',
            'dataset_variant': 'CLEVR'
        },
        'images': {
            'raw_images': False,
            'feature_extractor': {
                'cnn_model': 'resnet101',
                'num_blocks': 4
            }
        },
        'questions': {
            'embedding_type': 'random',
            'embedding_dim': 300
        }
    })

    # create problem
    clevr_dataset = CLEVR(problem_params)
    print('Problem {} instantiated.'.format(clevr_dataset.name))

    # instantiate DataLoader object
    batch_size = 64
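    # Hedged sketch (assumption, not from the scraped source): the snippet stops right
    # before the DataLoader is built, so this shows one plausible continuation - batch
    # the CLEVR problem with its collate_fn and fetch a single batch.
    dataloader = DataLoader(clevr_dataset,
                            collate_fn=clevr_dataset.collate_fn,
                            batch_size=batch_size,
                            shuffle=True,
                            num_workers=4)
    batch = next(iter(dataloader))
    print('Got a batch of type {} with keys: {}'.format(type(batch), list(batch.keys())))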
Example #4
        plt.show()


if __name__ == '__main__':
    """ Tests MultiHopsStackedAttentionNetwork on ShapeColorQuery"""

    # "Loaded parameters".
    from miprometheus.utils.param_interface import ParamInterface
    from miprometheus.utils.app_state import AppState
    app_state = AppState()
    app_state.visualize = False
    from miprometheus.problems import ShapeColorQuery
    problem_params = ParamInterface()
    problem_params.add_config_params({'data_folder': '~/data/shape-color-query/',
                                      'split': 'train',
                                      'regenerate': False,
                                      'dataset_size': 10000,
                                      'img_size': 128})

    # create problem
    shapecolorquery = ShapeColorQuery(problem_params)

    batch_size = 64

    # wrap DataLoader on top of this Dataset subclass
    from torch.utils.data import DataLoader

    dataloader = DataLoader(dataset=shapecolorquery, collate_fn=shapecolorquery.collate_fn,
                            batch_size=batch_size, shuffle=True, num_workers=4)

    model_params = ParamInterface()
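    # Hedged sketch: the MultiHopsStackedAttentionNetwork configuration that would be
    # loaded into model_params is not visible in this snippet, so rather than guessing
    # its keys we only pull one batch and inspect what the model would receive.
    # ('images' is an assumed key; fall back to the printed key list if it differs.)
    batch = next(iter(dataloader))
    print('Batch keys: {}'.format(list(batch.keys())))
    print('Images shape: {}'.format(batch['images'].shape))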
Example #5
    # learning
    def set_max_length(self, max_length):
        self.max_sequence_length = max_length


if __name__ == "__main__":
    """ Tests sequence generator - generates and displays a random sample"""

    # "Loaded parameters".
    from miprometheus.utils.param_interface import ParamInterface

    params = ParamInterface()
    params.add_config_params({'name': 'serial_recall_original',
                              'control_bits': 4,
                              'data_bits': 8,
                              'min_sequence_length': 1,
                              'max_sequence_length': 10,
                              'num_subseq_min': 1,
                              'num_subseq_max': 4})
    batch_size = 64

    # Create problem object.
    interruptnot = InterruptionNot(params)

    # get a sample
    sample = interruptnot[0]
    print(repr(sample))
    print('__getitem__ works.')

    # wrap DataLoader on top
    from torch.utils.data.dataloader import DataLoader
Example #6
        # Plot!
        plt.show()


if __name__ == '__main__':
    # Set visualization.
    from miprometheus.utils.app_state import AppState
    AppState().visualize = True

    from miprometheus.utils.param_interface import ParamInterface
    from torch.utils.data.dataloader import DataLoader
    from miprometheus.problems import CIFAR10

    problem_params = ParamInterface()
    problem_params.add_config_params({'use_train_data': True,
                                      'root_dir': '~/data/cifar10',
                                      'padding': [0, 0, 0, 0],
                                      'up_scaling': True})
    batch_size = 64

    # create problem
    problem = CIFAR10(problem_params)
    print('Problem {} instantiated.'.format(problem.name))

    # instantiate DataLoader object
    dataloader = DataLoader(problem, batch_size=batch_size, collate_fn=problem.collate_fn)

    # Test base model.
    from miprometheus.utils.param_interface import ParamInterface
    model_params = ParamInterface()
    model_params.add_config_params({'pretrained': False})
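    # Hedged sketch: which model class consumes model_params is not shown here, so this
    # only verifies that batching works and reports the tensor shapes a model would get.
    # The 'images'/'targets' keys are assumed from the standard image-classification
    # DataDict layout used in these examples.
    batch = next(iter(dataloader))
    print('images: {}, targets: {}'.format(batch['images'].shape, batch['targets'].shape))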
Example #7
        'input_size': input_size,
        'output_size': 10,
        'center_size': 1,
        'center_size_per_module': 32,
        'num_modules': 4
    }

    # Initialize the application state singleton.
    from miprometheus.utils.app_state import AppState
    from miprometheus.utils.data_dict import DataDict
    app_state = AppState()
    app_state.visualize = True

    from miprometheus.utils.param_interface import ParamInterface
    params = ParamInterface()
    params.add_config_params(params_dict)
    model = ThalNetModel(params)

    seq_length = 10
    batch_size = 2

    # Check for different seq_lengths and batch_sizes.
    for i in range(62):
        # Create random Tensors to hold inputs and outputs
        x = torch.randn(batch_size, 1, input_size, input_size)
        logits = torch.randn(batch_size, 1, params_dict['output_size'])
        y = x
        data_dict = DataDict({'sequences': x, 'targets': y})

        # Test forward pass.
        y_pred = model(data_dict)
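        # Hedged addition (not in the scraped snippet): a minimal smoke test of the
        # forward/backward path, assuming the model returns a single prediction tensor.
        # The zero target is arbitrary - it only checks that gradients flow.
        print('Forward pass {}: prediction shape {}'.format(i, y_pred.shape))
        loss = torch.nn.functional.mse_loss(y_pred, torch.zeros_like(y_pred))
        loss.backward()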
Example #8
        data_dict['masks'] = ptmasks
        data_dict['sequences_length'] = torch.ones([batch_size,1]).type(torch.CharTensor) * seq_length
        data_dict['num_subsequences'] = torch.ones([batch_size, 1]).type(torch.CharTensor)
        return data_dict


if __name__ == "__main__":
    """ Tests sequence generator - generates and displays a random sample"""

    # "Loaded parameters".
    from miprometheus.utils.param_interface import ParamInterface

    params = ParamInterface()
    params.add_config_params({#'control_bits': 2,
                              #'data_bits': 8,
                              #'antisymmetry': True,
                              'hard': True,
                              'min_sequence_length': 3,
                              'max_sequence_length': 5})
    batch_size = 64

    # Create problem object.
    seqsymcl = SequenceSymmetryCommandLines(params)

    # get a sample
    sample = seqsymcl[0]
    print(repr(sample))
    print('__getitem__ works.')

    # wrap DataLoader on top
    from torch.utils.data.dataloader import DataLoader
Example #9
    batch_size = 44
    sequence_nr = 1

    # Timing test parameters
    timing_test = True
    testbatches = 100

    # -------------------------

    # Define useful params
    from miprometheus.utils.param_interface import ParamInterface
    params = ParamInterface()
    tasks = ['Go', 'CompareColor']
    params.add_config_params({
        'data_folder': os.path.expanduser('~/data/cog'),
        'set': 'val',
        'dataset_type': 'canonical',
        'tasks': tasks
    })

    # Create problem - task Go
    cog_dataset = COG(params)

    # Get a sample - Go
    sample = cog_dataset[0]
    print(repr(sample))

    # Test whether data structures match expected definitions
    assert sample['images'].shape == torch.ones((4, 3, 112, 112)).shape
    assert sample['tasks'] == ['Go']
    assert sample['questions'] == ['point now beige u']
    assert sample['targets_reg'].shape == torch.ones((4, 2)).shape
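    # Hedged sketch: timing_test and testbatches are defined above but the timing code
    # itself is cut off. One plausible version - wrap the problem in a DataLoader (its
    # collate_fn is assumed to come from the Problem base class) and time how long it
    # takes to draw `testbatches` batches.
    if timing_test:
        import time
        from torch.utils.data import DataLoader
        dataloader = DataLoader(cog_dataset, batch_size=batch_size,
                                collate_fn=cog_dataset.collate_fn, num_workers=4)
        start = time.time()
        for i, batch in enumerate(dataloader):
            if i == testbatches - 1:
                break
        print('Drew {} batches of size {} in {:.2f}s.'.format(
            testbatches, batch_size, time.time() - start))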
Example #10
        return x_out


if __name__ == '__main__':
    """Unit test for the RelationalNetwork on SortOfCLEVR"""
    from miprometheus.utils.app_state import AppState
    from miprometheus.utils.param_interface import ParamInterface
    from torch.utils.data import DataLoader
    app_state = AppState()

    from miprometheus.problems.image_text_to_class.sort_of_clevr import SortOfCLEVR
    problem_params = ParamInterface()
    problem_params.add_config_params({
        'data_folder': '~/data/sort-of-clevr/',
        'split': 'train',
        'regenerate': False,
        'dataset_size': 10000,
        'img_size': 128
    })

    # create problem
    sort_of_clevr = SortOfCLEVR(problem_params)
    print('Problem {} instantiated.'.format(sort_of_clevr.name))

    # instantiate DataLoader object
    batch_size = 64
    problem = DataLoader(sort_of_clevr,
                         batch_size=batch_size,
                         collate_fn=sort_of_clevr.collate_fn)

    model_params = ParamInterface()
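    # Hedged sketch: the RelationalNetwork configuration that belongs in model_params is
    # not visible here, so instead of inventing its keys we only confirm that the
    # DataLoader yields well-formed batches.
    batch = next(iter(problem))
    print('Got a batch of type {} with keys: {}'.format(type(batch), list(batch.keys())))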
Example #11
    def set_max_length(self, max_length):
        self.max_sequence_length = max_length


if __name__ == "__main__":
    """ Tests sequence generator - generates and displays a random sample"""

    # "Loaded parameters".
    from miprometheus.utils.param_interface import ParamInterface

    params = ParamInterface()
    params.add_config_params({
        'control_bits': 4,
        'data_bits': 8,
        'min_sequence_length': 1,
        'max_sequence_length': 10,
        'num_subseq_min': 1,
        'num_subseq_max': 4,
        'num_rotation': 0.5
    })
    batch_size = 64

    # Create problem object.
    interruptswaprecall = InterruptionSwapRecall(params)

    # get a sample
    sample = interruptswaprecall[0]
    print(repr(sample))
    print('__getitem__ works.')

    # wrap DataLoader on top
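    # Hedged continuation: the snippet breaks off right after the comment above, so this
    # supplies the presumable DataLoader wrapping (collate_fn assumed from the Problem
    # base class) and pulls a single batch.
    from torch.utils.data import DataLoader
    dataloader = DataLoader(dataset=interruptswaprecall,
                            collate_fn=interruptswaprecall.collate_fn,
                            batch_size=batch_size)
    batch = next(iter(dataloader))
    print('Batch sequences shape: {}'.format(batch['sequences'].shape))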
Example #12

if __name__ == "__main__":
    """Unit test that generates a batch and displays a sample."""

    from miprometheus.utils.param_interface import ParamInterface
    params = ParamInterface()
    params.add_config_params({
        'settings': {
            'data_folder': '~/data/CLEVR_v1.0',
            'set': 'train',
            'dataset_variant': 'CLEVR'
        },
        'images': {
            'raw_images': False,
            'feature_extractor': {
                'cnn_model': 'resnet101',
                'num_blocks': 4
            }
        },
        'questions': {
            'embedding_type': 'random',
            'embedding_dim': 300
        }
    })

    # create problem
    clevr_dataset = CLEVR(params)

    batch_size = 64

    sample = clevr_dataset[0]
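    # Hedged sketch: the visible part of this example stops at __getitem__. A natural
    # next step, mirroring Example #3, is to print the sample and batch the dataset
    # with its collate_fn.
    print(repr(sample))
    from torch.utils.data import DataLoader
    dataloader = DataLoader(clevr_dataset, batch_size=batch_size,
                            collate_fn=clevr_dataset.collate_fn)
    batch = next(iter(dataloader))
    print('Batch keys: {}'.format(list(batch.keys())))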
Example #13
        plt.show()


if __name__ == '__main__':
    # Set visualization.
    from miprometheus.utils.app_state import AppState
    AppState().visualize = True

    from miprometheus.utils.param_interface import ParamInterface
    from torch.utils.data.dataloader import DataLoader
    from miprometheus.problems.image_to_class.mnist import MNIST

    problem_params = ParamInterface()
    problem_params.add_config_params({
        'use_train_data': True,
        'root_dir': '~/data/mnist',
        'padding': [0, 0, 0, 0],
        'up_scaling': False
    })
    batch_size = 64

    # create problem
    problem = MNIST(problem_params)
    print('Problem {} instantiated.'.format(problem.name))

    # instantiate DataLoader object
    dataloader = DataLoader(problem,
                            batch_size=batch_size,
                            collate_fn=problem.collate_fn)

    # Test base model.
    from miprometheus.utils.param_interface import ParamInterface
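    # Hedged sketch: the base-model test itself is cut off (the model class and its
    # parameters are not shown). Mirroring the CIFAR10 example above, we only create the
    # empty model_params and confirm that batching yields the expected tensors
    # ('images'/'targets' are assumed keys).
    model_params = ParamInterface()
    batch = next(iter(dataloader))
    print('images: {}, targets: {}'.format(batch['images'].shape, batch['targets'].shape))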
Example #14
        data_dict['num_subsequences'] = torch.ones([batch_size,
                                                    1]).type(torch.CharTensor)
        return data_dict


if __name__ == "__main__":
    """ Tests sequence generator - generates and displays a random sample"""

    # "Loaded parameters".
    from miprometheus.utils.param_interface import ParamInterface

    params = ParamInterface()
    params.add_config_params({  #'control_bits': 2,
        #'data_bits': 8,
        #'inequality': True,
        'hard': True,
        'min_sequence_length': 2,
        'max_sequence_length': 5
    })
    batch_size = 64

    # Create problem object.
    seqequacl = SequenceEqualityCommandLines(params)

    # get a sample
    sample = seqequacl[0]
    print(repr(sample))
    print('__getitem__ works.')

    # wrap DataLoader on top
    from torch.utils.data.dataloader import DataLoader
Example #15
    # learning
    def set_max_length(self, max_length):
        self.max_sequence_length = max_length


if __name__ == "__main__":
    """ Tests sequence generator - generates and displays a random sample"""

    # "Loaded parameters".
    from miprometheus.utils.param_interface import ParamInterface

    params = ParamInterface()
    params.add_config_params({
        'control_bits': 2,
        'data_bits': 8,
        'min_sequence_length': 1,
        'max_sequence_length': 10,
        'num_items': 1
    })
    batch_size = 64

    # Create problem object.
    maniptempswap = ManipulationTemporalSwap(params)

    # get a sample
    sample = maniptempswap[0]
    print(repr(sample))
    print('__getitem__ works.')

    # wrap DataLoader on top
    from torch.utils.data import DataLoader
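    # Hedged continuation: wrap the problem in a DataLoader and draw a handful of
    # batches (the problem generates sequences on the fly, so no attempt is made to
    # exhaust it). collate_fn is assumed to be inherited from the Problem base class.
    dataloader = DataLoader(dataset=maniptempswap,
                            collate_fn=maniptempswap.collate_fn,
                            batch_size=batch_size)
    for i, batch in enumerate(dataloader):
        print('Batch {}: sequences {}'.format(i, batch['sequences'].shape))
        if i == 4:
            break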
Example #16
    # learning
    def set_max_length(self, max_length):
        self.max_sequence_length = max_length


if __name__ == "__main__":
    """ Tests sequence generator - generates and displays a random sample"""

    # "Loaded parameters".
    from miprometheus.utils.param_interface import ParamInterface

    params = ParamInterface()
    params.add_config_params({'control_bits': 2,
                              'data_bits': 8,
                              'batch_size': 2,
                              'min_sequence_length': 1,
                              'max_sequence_length': 10,
                              'num_subseq_min': 4,
                              'num_subseq_max': 4})
    batch_size = 64

    # Create problem object.
    readingspan = ReadingSpan(params)

    # get a sample
    sample = readingspan[0]
    print(repr(sample))
    print('__getitem__ works.')

    # wrap DataLoader on top
    from torch.utils.data import DataLoader
Example #17

if __name__ == '__main__':
    """ Tests StackedAttentionNetwork on SortOfCLEVR"""

    # "Loaded parameters".
    from miprometheus.utils.param_interface import ParamInterface
    from miprometheus.utils.app_state import AppState
    app_state = AppState()
    app_state.visualize = True
    from miprometheus.problems.image_text_to_class.sort_of_clevr import SortOfCLEVR
    problem_params = ParamInterface()
    problem_params.add_config_params({
        'data_folder': '~/data/sort-of-clevr/',
        'split': 'train',
        'regenerate': False,
        'dataset_size': 10000,
        'img_size': 128
    })

    # create problem
    sortofclevr = SortOfCLEVR(problem_params)

    batch_size = 64

    # wrap DataLoader on top of this Dataset subclass
    from torch.utils.data.dataloader import DataLoader

    dataloader = DataLoader(dataset=sortofclevr,
                            collate_fn=sortofclevr.collate_fn,
                            batch_size=batch_size,
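    # Hedged sketch: the DataLoader call above is truncated in the source. A complete
    # version (remaining arguments assumed, mirroring the ShapeColorQuery example) plus
    # a quick batch check might look like this:
    dataloader = DataLoader(dataset=sortofclevr,
                            collate_fn=sortofclevr.collate_fn,
                            batch_size=batch_size,
                            shuffle=True,
                            num_workers=4)
    batch = next(iter(dataloader))
    print('Batch keys: {}'.format(list(batch.keys())))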
Example #18

if __name__ == '__main__':
    """ Tests CNN_LSTM on SortOfCLEVR"""

    # "Loaded parameters".
    from miprometheus.utils.param_interface import ParamInterface
    from miprometheus.utils.app_state import AppState
    app_state = AppState()
    app_state.visualize = True
    from miprometheus.problems.image_text_to_class.sort_of_clevr import SortOfCLEVR
    problem_params = ParamInterface()
    problem_params.add_config_params({
        'data_folder': '~/data/sort-of-clevr/',
        'split': 'train',
        'regenerate': False,
        'dataset_size': 10000,
        'img_size': 128
    })

    # create problem
    sortofclevr = SortOfCLEVR(problem_params)

    batch_size = 64

    # wrap DataLoader on top of this Dataset subclass
    from torch.utils.data import DataLoader

    dataloader = DataLoader(dataset=sortofclevr,
                            collate_fn=sortofclevr.collate_fn,
                            batch_size=batch_size,
Example #19
        data_dict['sequences_length'] = torch.ones([batch_size,1]).type(torch.CharTensor) * seq_length
        data_dict['num_subsequences'] = torch.ones([batch_size, 1]).type(torch.CharTensor)

        return data_dict


if __name__ == "__main__":
    """ Tests sequence generator - generates and displays a random sample"""

    # "Loaded parameters".
    from miprometheus.utils.param_interface import ParamInterface

    params = ParamInterface()
    params.add_config_params({#'control_bits': 4,
                              #'data_bits': 8,
                              'min_sequence_length': 1,
                              'max_sequence_length': 10,
                              'seq_start': 0,
                              'skip_step': 2})
    batch_size = 64

    # Create problem object.
    skiprecallcl = SkipRecallCommandLines(params)

    # get a sample
    sample = skiprecallcl[0]
    print(repr(sample))
    print('__getitem__ works.')

    # wrap DataLoader on top
    from torch.utils.data import DataLoader
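    # Hedged continuation: wrap the problem in a DataLoader (collate_fn assumed from the
    # Problem base class) and confirm that the batch carries the same fields the
    # per-sample DataDict defines above ('sequences_length', 'num_subsequences', ...).
    dataloader = DataLoader(dataset=skiprecallcl,
                            collate_fn=skiprecallcl.collate_fn,
                            batch_size=batch_size)
    batch = next(iter(dataloader))
    print('sequences_length: {}'.format(batch['sequences_length'].shape))
    print('num_subsequences: {}'.format(batch['num_subsequences'].shape))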