def __init__(self, opt, type_of_data, mydir=None, noisiness=-1):
        MNISTDataset.__init__(self, opt, type_of_data, mydir)
        if noisiness == -1 or noisiness >= self.num_classes:
            raise Exception(
                'Noisiness of {} is not allowed, please provide a valid '
                'argument in [0, {})'.format(noisiness, self.num_classes))
        self.noisiness = noisiness

        self.allign_data_for_func()
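
        # Usage sketch (an assumption, not repository code): `opt` is taken to
        # be the project's option/config object and `NoisyMNISTDataset` is a
        # hypothetical name for the class this __init__ belongs to. The
        # noisiness level must lie in [0, num_classes) or the check above raises.
        #     noisy_train = NoisyMNISTDataset(opt, 'train', noisiness=3)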
    def __init__(self, opt, type_of_data, mydir=None,
                 additional_constraint_on_data=lambda a, b: True,
                 relational_func=lambda a, b: a and b,
                 domain_constraints=lambda a: True,
                 y_classes=2):
        MNISTDataset.__init__(self, opt, type_of_data, mydir)
        self.additional_constraint_on_data = additional_constraint_on_data
        self.relational_func = relational_func
        self.domain_constraints = domain_constraints
        self.y_classes = y_classes

        self.allign_data_for_func(additional_constraint_on_data,
                                  relational_func, domain_constraints)
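
        # Sketch of the custom callables this constructor accepts (an
        # assumption drawn from the defaults above; `RelationalMNISTDataset`
        # is a hypothetical name for the enclosing class, not the repo's own):
        #     same_digit = lambda a, b: a == b      # relational target
        #     only_even = lambda a: a % 2 == 0      # domain constraint
        #     pairs = RelationalMNISTDataset(opt, 'train',
        #                                    relational_func=same_digit,
        #                                    domain_constraints=only_even)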
    def __init__(self,
                 opt,
                 type_of_data,
                 mydir=None,
                 additional_constraint_on_data=lambda a, b: True,
                 relational_func=lambda a, b: a and b):
        MNISTDataset.__init__(self, opt, type_of_data, mydir)
        self.additional_constraint_on_data = additional_constraint_on_data
        self.relational_func = relational_func

        self.allign_data_for_func(additional_constraint_on_data,
                                  relational_func)
    def __init__(self, opt, type_of_data, mydir=None):
        MNISTDataset.__init__(self, opt, type_of_data, mydir)

        self.allign_data_for_func()
import random

import numpy as np


def create_transfered(opt):

    type_of_data = 'test'

    mnist_train = MNISTDataset(opt,
                               'train',
                               trim_data=False,
                               image_size=(28, 28))
    mnist_valid = MNISTDataset(opt,
                               'valid',
                               trim_data=False,
                               image_size=(28, 28))

    mnist_input_data = np.concatenate((mnist_train.inputs, mnist_valid.inputs),
                                      axis=0)
    mnist_target_data = np.concatenate(
        (mnist_train.targets, mnist_valid.targets), axis=0)

    print(mnist_input_data.shape, mnist_target_data.shape)

    usps_train = USPSDataset(opt,
                             'train',
                             trim_data=False,
                             image_size=(28, 28))
    usps_valid = USPSDataset(opt,
                             'valid',
                             trim_data=False,
                             image_size=(28, 28))

    usps_input_data = np.concatenate((usps_train.inputs, usps_valid.inputs),
                                     axis=0)
    usps_target_data = np.concatenate((usps_train.targets, usps_valid.targets),
                                      axis=0)

    print(usps_input_data.shape, usps_target_data.shape)

    return
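
    # Note (a sketch, not repository code): both loaders above receive
    # image_size=(28, 28), so the natively 16x16 USPS digits are resized to
    # MNIST's resolution and the two domains share one input shape.
    # A minimal invocation, assuming `opt` is the parsed option object:
    #     create_transfered(opt)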
    possible_idxs_larger = list(range(len(larger_data_set)))
    possible_idxs_smaller = list(range(len(smaller_data_set)))
    inputs = []
    targets = []
    labels = []

    for i in range(len(smaller_data_set)):
        smaller_idx_idx = random.choice(range(len(possible_idxs_smaller)))
        smaller_idx = possible_idxs_smaller[smaller_idx_idx]
        smaller_label = smaller_data_set.targets[smaller_idx]
        # print(possible_idxs_smaller, smaller_idx, smaller_label)
        del possible_idxs_smaller[smaller_idx_idx]

        for larger_idx_idx, larger_idx in enumerate(possible_idxs_larger):
            larger_label = larger_data_set.targets[larger_idx]
            if smaller_label == larger_label:
                break

        if smaller_label != larger_label:
            continue

        del possible_idxs_larger[larger_idx_idx]

        inputs.append(smaller_data_set.inputs[smaller_idx])
        targets.append(larger_data_set.inputs[larger_idx])
        labels.append(smaller_label)

    usps = np.array(inputs)
    mnist = np.array(targets)
    labels = np.array(labels)

    print('smaller: {}, larger: {}'.format(smaller_data_set, larger_data_set))

    print('usps ', usps.shape, ' mnist ', mnist.shape, ' label ',
          labels.shape)
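
    # A possible follow-up (a sketch, not the repository's own code): persist
    # the label-matched pairs for a later USPS -> MNIST transfer experiment.
    # `np.savez` is standard NumPy; the file name is illustrative only.
    np.savez('usps_mnist_pairs.npz', usps=usps, mnist=mnist, labels=labels)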