Example #1
    def restart_network(self):
        """Rebuild the estimator network and its optimizer from the stored hyperparameters."""

        if self.traject == 'combine':
            print('MNIST Trajectory MINE network')
            self.net = networks.statistical_estimator(traject_max_depth = self.traject_max_depth,
                 traject_num_layers = self.traject_num_layers, traject_stride = self.traject_stride,
                 traject_kernel = self.traject_kernel, traject_padding = self.traject_padding,
                 traject_pooling = self.traject_pooling, number_descending_blocks=self.number_descending_blocks, 
                 number_repeating_blocks = self.number_repeating_blocks, repeating_blockd_size = self.repeating_blockd_size)
        elif self.traject == 'traject':
            print('Traject MINE network')
            self.net = networks.conv1d_classifier_(input_dim=[18,1000,0], output_size = 1,
                                                   max_depth = self.traject_max_depth, 
                                                   p_conv = 0, p_fc = 0, BN = False)
        else:
            print('MNIST MINE network')
            if self.net_num == 1:
                self.net = networks.statistical_estimator_DCGAN(input_size = 2, output_size = 1)
            elif self.net_num == 2:
                self.net = networks.statistical_estimator_DCGAN_2(input_size = 1, output_size = 1)
            else:
                self.net = networks.statistical_estimator_DCGAN_3(input_size = 1, output_size = 1, number_descending_blocks = self.number_descending_blocks,
                     number_repeating_blocks = self.number_repeating_blocks, repeating_blockd_size = self.repeating_blockd_size)
            
        print('')
        print('Restarted Network')
        if self.optimizer == 1:
            self.mine_net_optim = optim.SGD(self.net.parameters(), lr = self.lr)
            print('Optimizer: SGD')
        elif self.optimizer == 2: 
            self.mine_net_optim = optim.Adam(self.net.parameters(), lr = self.lr)
            print('Optimizer: Adam')
        else:
            self.mine_net_optim = optim.RMSprop(self.net.parameters(), lr = self.lr)
            print('Optimizer: RMSprop')
        #self.scheduler = optim.lr_scheduler.StepLR(self.mine_net_optim, step_size=10*(len(self.dataset)/self.batch_size), gamma=self.gamma)
        print('Learning rate = {}'.format(self.lr))
        print('Batch size = {}'.format(self.batch_size))
        #print('Gamma of lr decay = {}'.format(self.gamma))
        print('Using Net {}'.format(self.net_num))
        if self.net_num == 3:
            print('Number of Descending Blocks is {}'.format(self.number_descending_blocks))
            print('Number of times to repeat a block = {}'.format(self.number_repeating_blocks))
            print('The fully connected layer to repeat - {}'.format(self.repeating_blockd_size))
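Example #1 rebuilds self.net from the stored hyperparameters and then maps an integer flag onto one of three torch optimizers (1 -> SGD, 2 -> Adam, anything else -> RMSprop). Below is a minimal runnable sketch of that selection pattern; the helper name build_optimizer is ours, and the nn.Linear is only a stand-in because the networks.* estimator classes are not defined in this listing.

from torch import nn, optim

# Toy stand-in for self.net; the real networks.* estimator classes live elsewhere in the repo.
net = nn.Linear(10, 1)

def build_optimizer(flag, params, lr):
    # Same integer-flag convention as above: 1 -> SGD, 2 -> Adam, anything else -> RMSprop.
    table = {1: optim.SGD, 2: optim.Adam}
    opt_cls = table.get(int(flag), optim.RMSprop)
    print('Optimizer:', opt_cls.__name__)
    return opt_cls(params, lr=lr)

mine_net_optim = build_optimizer(2, net.parameters(), lr=3e-3)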

Example #2

# The listing for this example starts mid-script; the branch that loads the cached pickles when they already exist is reconstructed here as a plausible sketch.
if os.path.isfile('train.pickle'):
    with open('train.pickle', 'rb') as f:
        train = pickle.load(f)
    with open('test.pickle', 'rb') as f:
        test = pickle.load(f)
else:
    print("didn't find train.pickle")
    train, test = utils.create_train_test(dr=data_path)
    with open('train.pickle', 'wb') as f:
        pickle.dump(train, f)

    with open('test.pickle', 'wb') as f:
        pickle.dump(test, f)

net = networks.conv1d_classifier_(input_dim=[9, 1000, 0],
                                  output_size=10,
                                  p_conv=p_conv,
                                  p_fc=p_fc,
                                  max_depth=max_depth,
                                  num_layers=num_layers,
                                  repeating_block_depth=repeating_block_depth,
                                  repeating_block_size=repeating_block_size,
                                  stride=stride,
                                  kernel=kernel,
                                  padding=0,
                                  pooling=pooling)
print(net)
print('CUDA available?:', torch.cuda.is_available())
# Defining the optimizer
if not isinstance(optimizer, int):
    optimizer = int(optimizer)
if optimizer == 1:
    optimizer = optim.SGD(net.parameters(), lr=lr)
    print('')
    print('Optimizer: SGD')
elif optimizer == 2:
    # Completed by analogy with the Adam branch in Examples 1 and 3; the original listing cuts off here.
    optimizer = optim.Adam(net.parameters(), lr=lr)
    print('')
    print('Optimizer: Adam')
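Example #2 builds a 10-class conv1d classifier over [9, 1000]-shaped trajectories and picks an optimizer from the same integer flag. The following is a runnable sketch of one training step on random data with those shapes; the nn.Sequential model is only a stand-in for networks.conv1d_classifier_, whose definition is not part of this listing.

import torch
from torch import nn, optim

# Stand-in for networks.conv1d_classifier_: 1-D conv over [batch, 9 channels, 1000 samples],
# global pooling, and a 10-way linear head.
net = nn.Sequential(
    nn.Conv1d(9, 16, kernel_size=5), nn.ReLU(),
    nn.AdaptiveAvgPool1d(1), nn.Flatten(),
    nn.Linear(16, 10),
)
optimizer = optim.Adam(net.parameters(), lr=1e-3)
criterion = nn.CrossEntropyLoss()

# One training step on random data shaped like the trajectory tensors above.
x = torch.randn(4, 9, 1000)        # batch of 4 trajectories
y = torch.randint(0, 10, (4,))     # 10-class labels, matching output_size=10
optimizer.zero_grad()
loss = criterion(net(x), y)
loss.backward()
optimizer.step()
print('loss:', loss.item())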
Example #3
 def __init__(self, train = True, traject = True, batch = 1000, lr = 3e-3, gamma = 0.001, optimizer = 2, net_num = 3,
              traject_max_depth = 512, traject_num_layers = 6, traject_stride = [3,1],
              traject_kernel = 5, traject_padding = 0,
              traject_pooling = [1,2], number_descending_blocks = 3, 
              number_repeating_blocks=0, repeating_blockd_size=512,
              dataset_status = 'same'):
     self.net_num = net_num
     self.traject = traject
     self.dataset_status = dataset_status
     print(self.dataset_status)
     self.traject_max_depth = traject_max_depth 
     self.traject_num_layers = traject_num_layers 
     self.traject_stride = traject_stride
     self.traject_kernel = traject_kernel
     self.traject_padding = traject_padding
     self.traject_pooling = traject_pooling    
     self.number_descending_blocks = number_descending_blocks 
     self.number_repeating_blocks = number_repeating_blocks 
     self.repeating_blockd_size = repeating_blockd_size
     # Defining the network:
     if self.traject == 'combined':
         print('MNIST Trajectory MINE network')
         self.net = networks.statistical_estimator(traject_max_depth = self.traject_max_depth,
                                                   traject_num_layers = self.traject_num_layers, 
                                                   traject_stride = self.traject_stride,
                                                   traject_kernel = self.traject_kernel, 
                                                   traject_padding = self.traject_padding,
                                                   traject_pooling = self.traject_pooling, 
                                                   number_descending_blocks=self.number_descending_blocks, 
                                                   number_repeating_blocks = self.number_repeating_blocks, 
                                                   repeating_blockd_size = self.repeating_blockd_size)
     elif self.traject == 'traject':
         print('Traject MINE network')
         self.net = networks.conv1d_classifier_(input_dim=[18,1000,0], output_size = 1,
                                                p_conv = 0, p_fc = 0, 
                                                max_depth = self.traject_max_depth, 
                                                num_layers = self.traject_num_layers, 
                                                conv_depth_type = 'decending',
                                                repeating_block_depth = 5, 
                                                repeating_block_size = 0,
                                                stride = self.traject_stride, 
                                                kernel = self.traject_kernel, 
                                                padding = self.traject_padding,
                                                pooling = self.traject_pooling, 
                                                BN = False)
     else:
         print('MNIST MINE network')
         if self.net_num == 1:
             self.net = networks.statistical_estimator_DCGAN(input_size = 2, output_size = 1)
         elif self.net_num == 2:
             self.net = networks.statistical_estimator_DCGAN_2(input_size = 1, output_size = 1)
         else:
             self.net = networks.statistical_estimator_DCGAN_3(input_size = 1, output_size = 1, number_descending_blocks = self.number_descending_blocks,
                  number_repeating_blocks = self.number_repeating_blocks, repeating_blockd_size = self.repeating_blockd_size)
     
     #self.input1 = input1
     #self.input2 = input2
     self.lr = lr
     # Coerce the optimizer flag to int before storing it, so the comparisons below see the converted value.
     if not isinstance(optimizer, int):
         optimizer = int(optimizer)
     self.optimizer = optimizer
     if self.optimizer == 1:
         self.mine_net_optim = optim.SGD(self.net.parameters(), lr = self.lr)
         print('')
         print('Optimizer: SGD')
     elif self.optimizer == 2:
         self.mine_net_optim = optim.Adam(self.net.parameters(), lr = self.lr)
         print('')
         print('Optimizer: Adam')
     else:
         self.mine_net_optim = optim.RMSprop(self.net.parameters(), lr = self.lr)
         print('')
         print('Optimizer: RMSprop')
     
     self.train_value = train
     #self.dataloader = torch.utils.data.DataLoader(self.dataset,  batch_size = batch, shuffle = True)    
     self.batch_size = batch
     
     #self.scheduler = optim.lr_scheduler.StepLR(self.mine_net_optim, step_size=10*(len(self.dataset)/self.batch_size), gamma=gamma)
     self.gamma = gamma
     #self.scheduler2 = optim.lr_scheduler.ReduceLROnPlateau(self.mine_net_optim, mode='max', factor=0.5, patience=10, verbose=False, threshold=0.0001, threshold_mode='abs', cooldown=0, min_lr=0, eps=1e-08)    
     self.results = []
     
     #print all variables of the system:
     print('Learning rate = {}'.format(self.lr))
     print('Batch size = {}'.format(self.batch_size))
     #print('Gamma of lr decay = {}'.format(self.gamma))
     print('Using Net {}'.format(self.net_num))
     if self.net_num == 3:
         print('Number of Descending Blocks is {}'.format(self.number_descending_blocks))
         print('Number of times to repeat a block = {}'.format(self.number_repeating_blocks))
         print('The fully connected layer to repeat - {}'.format(self.repeating_blockd_size))
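The __init__ above wires a statistics network (self.net) to an optimizer for MINE-style mutual information estimation. For reference, here is a minimal runnable sketch of the Donsker-Varadhan objective that MINE maximizes, E_P[T(x, y)] - log E_Q[exp T(x, y')], with a tiny MLP standing in for the statistical_estimator_* networks; the actual training loop of this class is not shown in the listing.

import torch
from torch import nn

# Toy statistics network T(x, y) on scalar pairs instead of MNIST images / trajectories.
T = nn.Sequential(nn.Linear(2, 64), nn.ReLU(), nn.Linear(64, 1))

n = 1000
x = torch.randn(n, 1)
y = x + 0.1 * torch.randn(n, 1)            # correlated pair, so the true MI is well above zero
y_shuffled = y[torch.randperm(n)]          # shuffling y yields samples from the product of marginals

joint = T(torch.cat([x, y], dim=1)).mean()
marginal = torch.logsumexp(T(torch.cat([x, y_shuffled], dim=1)), dim=0) - torch.log(torch.tensor(float(n)))
mi_lower_bound = joint - marginal          # Donsker-Varadhan bound: E_P[T] - log E_Q[exp(T)]
print('MI lower bound estimate:', mi_lower_bound.item())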