Example #1
 def generate_raw(self):
     ds = datestr(self.task.created_at)
     uuid = self.task.uuid
     fn = '/'.join([self.exec_dir(), 'raw'])
     with open(fn, 'w') as f:
         f.write('''<vddb>
                      <query>
                        <return_code>0</return_code>
                        <extra_info>Success</extra_info>
                      </query>
                      <matches size="%s" status="0">''' %
                 (len(self.result['matches']), ))
         for m in self.result['matches']:
             t = 'audio' if m['video_duration'] == 0 else 'video'
             f.write(
                 '''<match>
                          <master_uuid>%s</master_uuid>
                          <master_name>%s</master_name>
                          <instance_id>%s</instance_id>
                          <instance_name>%s</instance_name>
                          <track_type>%s</track_type>
                          <track_id>%s</track_id>
                          <match_duration>%s</match_duration>
                          <score>%s</score>
                          <reference_offset>%s</reference_offset>
                          <sample_offset>%s</sample_offset>
                        </match>''' %
                 (str(m['meta_uuid']), m['meta_name'].encode('utf-8'),
                  str(m['instance_id']), m['instance_name'].encode('utf-8'),
                  str(m['match_type']), str(m['track_id']),
                  str(m[t + '_duration']), str(m[t + '_score']),
                  str(m[t + '_ref_offset']), str(m[t + '_sample_offset'])))
         f.write('''  </matches>
                    </vddb>''')
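Every snippet on this page relies on a `datestr` helper (or `utils.datestr()`) whose definition the page does not show. As a minimal sketch, and only as an assumption about its behaviour, it could be a thin wrapper around `time.strftime` that turns a Unix timestamp (or the current time) into a compact date string used as a path or filename component:

import time

def datestr(ts=None):
    # Hypothetical sketch -- the real datestr() used by these projects is not shown here.
    # Formats a Unix timestamp (or the current time when ts is None) as a compact
    # date string suitable for a directory or file name, e.g. '20240101'.
    return time.strftime('%Y%m%d', time.localtime(ts))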
Example #2
 def query_dir(self, backend):
     ds = datestr(self.task.created_at)
     uuid = self.task.uuid
     task_dir = '/'.join([self.path, 'var', 'tmp', ds, uuid])
     return '/'.join([task_dir, backend, self.timestr] +
                     (['hot'] if self.state.hot else []) +
                     (['slicing'] if self.state.slicing else []))
Example #3
 def dna_file(self):
     if self.state.slicing:
         ds = datestr(self.task.created_at)
         uuid = self.task.uuid
         task_dir = '/'.join([self.path, 'var', 'tmp', ds, uuid])
         # all backends can share the same sliced dna
         return '/'.join([task_dir, self.timestr, 'sliced.dna'])
     else:
         return self.dna
Example #4
    def __init__(self, task, account, backend, site_file, timestr):
        self.path = getenv('MW_HOME')
        assert self.path is not None
        self.logger = logging.getLogger('mwtm_query')

        self.task = task
        self.account = account
        self.backend = backend
        # self.parse_extra(backend.extra)
        self.site_file = site_file
        self.timestr = timestr

        ds = datestr(self.task.created_at)
        uuid = self.task.uuid
        self.dna = '/'.join([self.path, 'var', 'tmp', ds, uuid, timestr, 'merge.dna'])

        self.scope = None
        self.state = None
        self.result = {}
Example #5
 def task_dir(self):
     ds = datestr(self.task.created_at)
     uuid = self.task.uuid
     return '/'.join([self.path, 'var', 'cache', ds, uuid[6:8], uuid[21:23],
                      uuid])
Example #6
 def exec_dir(self):
     ds = datestr(self.task.created_at)
     uuid = self.task.uuid
     return '/'.join([self.path, 'var', 'tmp', ds, uuid, self.timestr])
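Taken together, the path helpers above (`query_dir`, `dna_file`, `task_dir`, `exec_dir`) all build paths under `MW_HOME`. The following standalone sketch only illustrates how the pieces compose; every concrete value below (the home path, date, uuid, backend name and timestamp) is a made-up placeholder:

# Placeholder values, purely for illustration.
path = '/opt/mw'                                  # getenv('MW_HOME')
ds = '20240101'                                   # datestr(task.created_at)
uuid = '0123456789abcdef0123456789abcdef'         # task.uuid
timestr = '1700000000'                            # per-run timestamp

tmp_task_dir = '/'.join([path, 'var', 'tmp', ds, uuid])
exec_dir = '/'.join([tmp_task_dir, timestr])                        # raw results land here
query_dir = '/'.join([tmp_task_dir, 'backend_a', timestr, 'hot'])   # with state.hot set
task_dir = '/'.join([path, 'var', 'cache', ds, uuid[6:8], uuid[21:23], uuid])

print(exec_dir)   # /opt/mw/var/tmp/20240101/<uuid>/1700000000
print(task_dir)   # /opt/mw/var/cache/20240101/<uuid[6:8]>/<uuid[21:23]>/<uuid>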
Example #7
def main():

    parser = argparse.ArgumentParser()
    parser.add_argument('--batchSz', type=int, default=5)
    parser.add_argument('--dice', action='store_true', default=True)
    parser.add_argument('--nEpochs', type=int, default=600)
    parser.add_argument('--inChannels', type=int, default=2)
    parser.add_argument('--start-epoch',
                        default=0,
                        type=int,
                        metavar='N',
                        help='manual epoch number (useful on restarts)')
    parser.add_argument('--resume',
                        default='',
                        type=str,
                        metavar='PATH',
                        help='path to latest checkpoint (default: none)')

    parser.add_argument('--fold_id',
                        default='1',
                        type=str,
                        help='Select subject for fold validation')

    parser.add_argument('--weight-decay',
                        '--wd',
                        default=1e-8,
                        type=float,
                        metavar='W',
                        help='weight decay (default: 1e-8)')

    parser.add_argument('--lr',
                        default=1e-3,
                        type=float,
                        help='learning rate (default: 1e-3)')

    parser.add_argument('--cuda', action='store_true', default=True)
    parser.add_argument('--save')
    parser.add_argument('--model',
                        type=str,
                        default='UNET3D',
                        choices=('VNET', 'VNET2', 'UNET3D', 'DENSENET1',
                                 'DENSENET2', 'DENSENET3', 'HYPERDENSENET'))
    parser.add_argument('--opt',
                        type=str,
                        default='sgd',
                        choices=('sgd', 'adam', 'rmsprop'))

    args = parser.parse_args()
    best_prec1 = 100.
    DIM = (128, 128, 32)

    training_generator, val_generator = generate_datasets(DIM,
                                                          fold_id=args.fold_id,
                                                          samples_train=500,
                                                          samples_val=10)

    args.cuda = args.cuda and torch.cuda.is_available()

    torch.manual_seed(1777777)
    if args.cuda:
        torch.cuda.manual_seed(1777777)

    args.save = args.model + '_checkpoints/' + args.model + '_base_{}_fold_id_{}'.format(
        utils.datestr(), args.fold_id)

    if os.path.exists(args.save):
        shutil.rmtree(args.save)
        os.mkdir(args.save)
    else:
        os.makedirs(args.save)

    weight_decay = args.weight_decay

    print("Building Model . . . . . . . .")

    if args.model == 'VNET2':
        model = medical_zoo.VNetLight(elu=False, nll=False)
    elif args.model == 'UNET3D':
        model = medical_zoo.UNet3D(in_channels=2, n_classes=4)
    else:
        # only these two architectures are wired up in this example
        raise ValueError('unsupported --model choice: {}'.format(args.model))

    if args.resume:
        if os.path.isfile(args.resume):
            print("=> loading checkpoint '{}'".format(args.resume))
            checkpoint = torch.load(args.resume)
            args.start_epoch = checkpoint['epoch']
            best_prec1 = checkpoint['best_prec1']
            model.load_state_dict(checkpoint['state_dict'])
            print("=> loaded checkpoint '{}' (epoch {})".format(
                args.evaluate, checkpoint['epoch']))
        else:
            print("=> no checkpoint found at '{}'".format(args.resume))

    if args.cuda:
        model = model.cuda()
        print("Model transferred in GPU.....")

    print('Number of params: {}'.format(
        sum([p.data.nelement() for p in model.parameters()])))
    if args.opt == 'sgd':
        optimizer = optim.SGD(model.parameters(),
                              lr=args.lr,
                              momentum=0.5,
                              weight_decay=weight_decay)
    elif args.opt == 'adam':
        optimizer = optim.Adam(model.parameters(),
                               lr=args.lr,
                               weight_decay=weight_decay)
    elif args.opt == 'rmsprop':
        optimizer = optim.RMSprop(model.parameters(),
                                  lr=args.lr,
                                  weight_decay=weight_decay)

    train_f = open(os.path.join(args.save, 'train.csv'), 'w')
    val_f = open(os.path.join(args.save, 'val.csv'), 'w')

    criterion = medical_zoo.DiceLoss(idx_to_ignore_after=4)

    print("START TRAINING...")
    for epoch in range(1, args.nEpochs + 1):

        train_dice(args, epoch, model, training_generator, optimizer,
                   criterion, train_f)

        dice_loss = test_dice(args, epoch, model, val_generator, optimizer,
                              criterion, val_f)

        is_best = False
        if dice_loss < best_prec1:
            is_best = True
            best_prec1 = dice_loss

            utils.save_checkpoint(
                {
                    'epoch': epoch,
                    'state_dict': model.state_dict(),
                    'best_prec1': best_prec1
                }, is_best, args.save, args.model + "_best")
        else:
            utils.save_checkpoint(
                {
                    'epoch': epoch,
                    'state_dict': model.state_dict(),
                    'best_prec1': best_prec1
                }, is_best, args.save, args.model + "_last")
    train_f.close()
    val_f.close()
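`utils.save_checkpoint` above is project code that the example does not reproduce. As a rough sketch of what such a helper typically does, assuming it simply serialises the state dict with `torch.save` under the chosen directory and file name (the signature is inferred from the calls above; the body is an assumption):

import os
import torch

def save_checkpoint(state, is_best, save_dir, name):
    # Hypothetical sketch of a checkpoint helper like the one called above.
    # `state` carries the epoch, the model state_dict and the best metric so far.
    os.makedirs(save_dir, exist_ok=True)
    torch.save(state, os.path.join(save_dir, name + '.pth.tar'))
    if is_best:
        # keep a stable copy of the best checkpoint so far
        torch.save(state, os.path.join(save_dir, 'model_best.pth.tar'))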
Example #8
def make_rdm_multiple_hierarchy(name,
                                num_networks,
                                title="-",
                                save_files=True,
                                file_save_name=None,
                                skips=[]):
    if file_save_name is None:
        file_save_name = name
    # Make one rdm for each network
    rdmatrices_left = []
    rdmatrices_right = []
    for i in range(num_networks + len(skips)):
        # Skip number
        if skips is not None and i in skips:
            continue
        model = utils.load_object(name, i)
        hidden, _ = accuracy_test_with_goals(model, i)

        # Turn a list of tensors into a list of np vectors
        for i, tensor in enumerate(hidden):
            hidden[i] = tensor.numpy().reshape(-1)

        # Now cut that in two and make an RDM for each
        cutoff = int(len(hidden[0]) // 2)
        left_units = [vector[:cutoff] for vector in hidden]
        rdm_left = analysis.rdm_spearman(left_units)
        rdmatrices_left.append(rdm_left)

        right_units = [vector[cutoff:] for vector in hidden]
        rdm_right = analysis.rdm_spearman(right_units)
        rdmatrices_right.append(rdm_right)

    matrices = []
    # Do the same processing for each side (low level/left and high_level/right)
    for side in [[rdmatrices_left, "_goals"], [rdmatrices_right, "_actions"]]:
        # Now average over all matrices
        avg_matrix = None
        for matrix in side[0]:
            if avg_matrix is None:
                avg_matrix = matrix
            else:
                avg_matrix += matrix
        avg_matrix = avg_matrix / num_networks

        # delete the unwanted rows and columns:
        avg_matrix = np.delete(avg_matrix, [0, 6, 12], 0)
        avg_matrix = np.delete(avg_matrix, [0, 6, 12], 1)
        nps = 5  # number of elements per sequence

        side_name = file_save_name + side[1]
        np.savetxt(side_name + "_rdm_mat" + utils.datestr() + ".txt",
                   avg_matrix,
                   delimiter="\t",
                   fmt='%.2e')
        labels = []
        for i, sequence in enumerate(seqs):
            for action in sequence[1:-1]:
                labels.append(str(i) + '_' + action)

        analysis.plot_rdm(avg_matrix, labels,
                          title + side_name + " spearman rho matrix")
        if save_files:
            plt.savefig(side_name + '_rdm' + utils.datestr())
        plt.clf()

        #        nps = 5  # number of elements per sequence
        mdsy = analysis.mds(avg_matrix)
        for i, style in enumerate(['ro-', 'b|--', 'gx-.']):
            analysis.plot_mds_points(mdsy[nps * i:nps * i + nps],
                                     range(nps),
                                     labels=labels[nps * i:nps * i + nps],
                                     style=style)
        plt.title(title + side_name)
        if save_files:
            plt.savefig(side_name + '_mds' + utils.datestr())
        plt.clf()
        matrices.append(avg_matrix)
    return matrices
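The accumulate-then-divide loop used above, and again in the examples that follow, can also be written directly with numpy, which removes the explicit `None` check. A small equivalent sketch (the random matrices stand in for the per-network RDMs and are dummy data):

import numpy as np

rdmatrices = [np.random.rand(18, 18) for _ in range(5)]   # dummy per-network RDMs

# mean over networks; equivalent to the manual accumulation when every
# non-skipped network contributed exactly one matrix
avg_matrix = np.mean(np.stack(rdmatrices), axis=0)

# drop the unwanted rows and columns, as the examples do
avg_matrix = np.delete(avg_matrix, [0, 6, 12], axis=0)
avg_matrix = np.delete(avg_matrix, [0, 6, 12], axis=1)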
Example #9
def make_rdm_multiple_special(name1,
                              name2,
                              num_networks,
                              file_save_name,
                              title,
                              skips1=[],
                              skips2=[]):
    mats = []
    for name, skips in [(name1, skips1), (name2, skips2)]:
        print(name)
        # Make one rdm for each network
        rdmatrices_left = []
        rdmatrices_right = []
        for i in range(num_networks + len(skips)):
            # Skip number
            if skips is not None and i in skips:
                continue
            model = utils.load_object(name, i)
            if name == name1:  # Have to do the goal ones first.
                hidden = accuracy_test_reg_hierarchy(model, i)
            else:
                hidden = accuracy_test_reg_hierarchy_nogoals(model, i)

            # Turn a list of tensors into a list of np vectors
            for i, tensor in enumerate(hidden):
                hidden[i] = tensor.numpy().reshape(-1)

            # Now cut that in two and make an RDM for each
            cutoff = int(len(hidden[0]) // 2)
            left_units = [vector[:cutoff] for vector in hidden]
            rdm_left = analysis.rdm_spearman(left_units)
            rdmatrices_left.append(rdm_left)

            right_units = [vector[cutoff:] for vector in hidden]
            rdm_right = analysis.rdm_spearman(right_units)
            rdmatrices_right.append(rdm_right)

        # Do the same processing for each side (low level/left and high_level/right)
        for side in [rdmatrices_left, rdmatrices_right]:  # goals, then actions
            # Now average over all matrices
            avg_matrix = None
            for matrix in side:
                if avg_matrix is None:
                    avg_matrix = matrix
                else:
                    avg_matrix += matrix
            avg_matrix = avg_matrix / num_networks
            mats.append(avg_matrix)

    # Now average the average matrices
    rdmatrices_left = (mats[0] + mats[2]) / 2
    rdmatrices_right = (mats[1] + mats[3]) / 2

    # Do the same processing for each side (low level/left and high_level/right)
    for side in [[rdmatrices_left, "_goals"], [rdmatrices_right, "_actions"]]:
        matrix = side[0]
        side_name = file_save_name + side[1]
        np.savetxt(side_name + "_rdm_mat" + utils.datestr() + ".txt",
                   matrix,
                   delimiter="\t",
                   fmt='%.2e')
        labels = []
        for i, sequence in enumerate(pnas2018task.seqs):
            for action in sequence[1:]:
                labels.append(str(i) + '_' + action)
        analysis.plot_rdm(matrix, labels,
                          title + side_name + " spearman rho matrix")
        plt.savefig(side_name + '_rdm' + utils.datestr())
        plt.clf()

        mdsy = analysis.mds(matrix)
        for i, style in enumerate(['ro-', 'b|--', 'gx-.', 'k_:']):
            analysis.plot_mds_points(mdsy[6 * i:6 * i + 6],
                                     range(6),
                                     labels=labels[6 * i:6 * i + 6],
                                     style=style)
        plt.title(title + side_name)
        plt.savefig(side_name + '_mds' + utils.datestr())
        plt.clf()
Example #10
def make_rdm_multiple(name,
                      num_networks,
                      type="spearman",
                      with_goals=False,
                      title="-",
                      save_files=True,
                      skips=[]):
    # Make one rdm for each network
    optimal_list = []
    rdmatrices = []
    error_per_step = np.zeros((3, 6, 11), dtype=np.float32)  # number of steps
    hidden_avg = []
    hiddens = np.zeros((3, 6), dtype=np.float32)
    for i in range(num_networks + len(skips)):
        if i in skips:
            continue
        model = utils.load_object(name, i)
        if with_goals:
            hidden, optimal = accuracy_test_with_goals(model, i)
        else:
            hidden, optimal, error_per_step_model = accuracy_test(model, i)
            error_per_step += error_per_step_model
        optimal_list.append(optimal)
        # Turn into a list of simple vectors
        for j, tensor in enumerate(hidden):
            hidden[j] = tensor.numpy().reshape(-1)
            hidden_avg.append(np.average(
                hidden[j]))  # Get the average activation for that time-step
        if type == "spearman":
            rdmatrix = analysis.rdm_spearman(hidden)
        elif type == "euclidian":
            rdmatrix = analysis.rdm_euclidian(hidden)
        else:
            raise Exception("RDM type " + type + " not implemented")
        rdmatrices.append(rdmatrix)

    # Now accumulate the per-step average activations into the 3x6 hiddens grid,
    # cycling through its 18 slots
    i = j = 0
    for act_avg in hidden_avg:
        hiddens[i, j] += act_avg
        j += 1
        if j > 5:
            j = 0
            i += 1
            if i > 2:
                i = 0
                j = 0
    print(hiddens / num_networks)
    print("{0} networks, of which {1} achieve optimal accuracy".format(
        num_networks, optimal_list.count(True)))

    # Hidden activation per step averages
    #for i, hidden_act in enumerate(hidden):
    #    hidden[i] = np.average(hidden_act)
    #hidden_avg = hidden.reshape(18, 11)
    #print(enumerate(hidden_avg))

    # Error per step averages
    error_avg = error_per_step / 100.
    error_avg = error_avg.reshape(18, 11)
    print(error_avg)

    # Now average over all matrices
    avg_matrix = None
    for matrix in rdmatrices:
        if avg_matrix is None:
            avg_matrix = matrix
        else:
            avg_matrix += matrix
    avg_matrix = avg_matrix / num_networks

    # delete the unwanted rows and columns:
    avg_matrix = np.delete(avg_matrix, [0, 6, 12], 0)
    avg_matrix = np.delete(avg_matrix, [0, 6, 12], 1)
    nps = 5  # number of elements per sequence

    if save_files:
        np.savetxt(name + "_rdm_mat_" + type + utils.datestr() + ".txt",
                   avg_matrix,
                   delimiter="\t",
                   fmt='%.2e')
        np.savetxt(name + "_errors" + utils.datestr() + ".txt",
                   error_avg,
                   delimiter="\t",
                   fmt='%.2e')
    labels = []
    for i, sequence in enumerate(seqs):
        for action in sequence[1:-1]:
            labels.append(str(i) + '_' + action)
    analysis.plot_rdm(avg_matrix, labels, title + " " + type + " matrix")
    if save_files:
        plt.savefig(name + '_rdm_' + type)
    plt.clf()

    mdsy = analysis.mds(avg_matrix)
    for i, style in enumerate(['ro-', 'b|--', 'gx-.']):
        analysis.plot_mds_points(mdsy[nps * i:nps * i + nps],
                                 range(nps),
                                 labels=labels[nps * i:nps * i + nps],
                                 style=style)
    plt.title(title)
    if save_files:
        plt.savefig(name + '_mds')
    plt.clf()
    return avg_matrix
Example #11
def make_rdm_multiple_predictive(name,
                                 num_networks,
                                 type='sigmoid',
                                 title="-",
                                 save_files=True,
                                 skips=[]):
    # Make one rdm for each network
    optimal_list = []
    rdmatrices = []
    for i in range(num_networks + len(skips)):
        if i in skips:
            continue
        model = utils.load_object(name, i)
        hidden, optimal = accuracy_test_predictive(model, i, type=type)
        optimal_list.append(optimal)
        # Turn into a list of simple vectors
        for i, tensor in enumerate(hidden):
            hidden[i] = tensor.numpy().reshape(-1)
        if optimal:
            rdmatrix = analysis.rdm_spearman(hidden)
            rdmatrices.append(rdmatrix)
    print("{0} networks, of which {1} achieve optimal accuracy".format(
        num_networks, optimal_list.count(True)))
    # Now average over all matrices
    avg_matrix = None
    for matrix in rdmatrices:
        if avg_matrix is None:
            avg_matrix = matrix
        else:
            avg_matrix += matrix
    avg_matrix = avg_matrix / len(rdmatrices)

    # delete the unwanted rows and columns:
    avg_matrix = np.delete(avg_matrix, [0, 6, 12], 0)
    avg_matrix = np.delete(avg_matrix, [0, 6, 12], 1)
    nps = 5  # number of elements per sequence

    if save_files:
        np.savetxt(name + "_rdm_mat" + utils.datestr() + ".txt",
                   avg_matrix,
                   delimiter="\t",
                   fmt='%.2e')
    labels = []
    for i, sequence in enumerate(seqs):
        for action in sequence[1:-1]:
            labels.append(str(i) + '_' + action)
    analysis.plot_rdm(avg_matrix, labels, title + " spearman rho matrix")
    if save_files:
        plt.savefig(name + '_rdm')
    plt.clf()

    mdsy = analysis.mds(avg_matrix)
    for i, style in enumerate(['ro-', 'b|--', 'gx-.']):
        analysis.plot_mds_points(mdsy[nps * i:nps * i + nps],
                                 range(nps),
                                 labels=labels[nps * i:nps * i + nps],
                                 style=style)
    plt.title(title)
    if save_files:
        plt.savefig(name + '_mds')
    plt.clf()
    return avg_matrix
Example #12
def make_rdm_multiple_hierarchy_nogoals(name,
                                        num_networks,
                                        title="-",
                                        save_files=True,
                                        file_save_name=None,
                                        cutoff=None):
    if file_save_name is None:
        file_save_name = name
    # Make one rdm for each network
    rdmatrices_left = []
    rdmatrices_right = []
    for i in range(num_networks):
        if i == 19:
            continue
        model = utils.load_object(name, i)
        hidden = accuracy_test_reg_hierarchy_nogoals(model, i)

        # Turn a list of tensors into a list of np vectors
        for i, tensor in enumerate(hidden):
            hidden[i] = tensor.numpy().reshape(-1)

        # Now cut that in two and make an RDM for each
        if cutoff is None:
            cutoff = int(len(hidden[0]) // 2)
        left_units = [vector[:cutoff] for vector in hidden]
        rdm_left = analysis.rdm_spearman(left_units)
        rdmatrices_left.append(rdm_left)

        right_units = [vector[cutoff:] for vector in hidden]
        rdm_right = analysis.rdm_spearman(right_units)
        rdmatrices_right.append(rdm_right)

    # Do the same processing for each side (low level/left and high_level/right)
    for side in [[rdmatrices_left, "_goals"], [rdmatrices_right, "_actions"]]:
        # Now average over all matrices
        avg_matrix = None
        for matrix in side[0]:
            if avg_matrix is None:
                avg_matrix = matrix
            else:
                avg_matrix += matrix
        avg_matrix = avg_matrix / num_networks
        side_name = file_save_name + side[1]
        np.savetxt(side_name + "_rdm_mat" + utils.datestr() + ".csv",
                   avg_matrix,
                   delimiter=",")
        labels = []
        for i, sequence in enumerate(pnas2018task.seqs):
            for action in sequence[1:]:
                labels.append(str(i) + '_' + action)
        analysis.plot_rdm(avg_matrix, labels,
                          title + side_name + " spearman rho matrix")
        if save_files:
            plt.savefig(side_name + '_rdm' + utils.datestr())
        plt.clf()

        mdsy = analysis.mds(avg_matrix)
        for i, style in enumerate(['ro-', 'b|--', 'gx-.', 'k_:']):
            analysis.plot_mds_points(mdsy[6 * i:6 * i + 6],
                                     range(6),
                                     labels=labels[6 * i:6 * i + 6],
                                     style=style)
        plt.title(title + side_name)
        if save_files:
            plt.savefig(side_name + '_mds' + utils.datestr())
        plt.clf()
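`analysis.rdm_spearman`, used throughout the RDM examples above, is also project code. Assuming it builds a representational dissimilarity matrix as one minus the Spearman rank correlation between every pair of activation vectors, a compact sketch with `scipy.stats.spearmanr` would look like this:

import numpy as np
from scipy.stats import spearmanr

def rdm_spearman_sketch(vectors):
    # Hypothetical sketch: pairwise 1 - Spearman rho between activation vectors.
    n = len(vectors)
    rdm = np.zeros((n, n))
    for a in range(n):
        for b in range(n):
            rho, _ = spearmanr(vectors[a], vectors[b])
            rdm[a, b] = 1.0 - rho
    return rdm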