Code example #1
0
    # Example: train a DecorrelatedNet on 48x48 spectrograms from two sensors.
    from preprocess import Datasets
    import torch.utils.data
    from preprocess.transforms import SpectrogramTransform
    from preprocess.fusion import separate_sensors_collate
    from param import fs, duration_window, duration_overlap, spectro_batch_size

    # Turn each raw signal into a log-power spectrogram resized to 48x48.
    spectrogram_transform = SpectrogramTransform(["Acc_norm", "Gyr_y"], fs, duration_window, duration_overlap,
                                                 spectro_batch_size, interpolation='linear', log_power=True, out_size=(48,48))
    # Keep each sensor as a separate batch element (late fusion) rather than concatenating.
    collate_fn = separate_sensors_collate

    try :  # do not reload the datasets if they already exist
        train_dataset   # a bare name lookup: raises NameError when not yet defined
        val_dataset

    except NameError:
        train_dataset = Datasets.SignalsDataSet(mode='train', split='balanced', comp_preprocess_first=True, transform=spectrogram_transform)
        val_dataset =   Datasets.SignalsDataSet(mode='val',   split='balanced', comp_preprocess_first=True, transform=spectrogram_transform)

    # Shuffle only the training data; validation order is irrelevant here.
    train_dataloader = torch.utils.data.DataLoader(train_dataset, batch_size=64, collate_fn=collate_fn, num_workers=0, shuffle=True)
    val_dataloader   = torch.utils.data.DataLoader(val_dataset,   batch_size=64, collate_fn=collate_fn, num_workers=0)

    str_result = ""


    # NOTE(review): DecorrelatedNet and device are assumed to be defined earlier
    # in the enclosing script/notebook — confirm before running this standalone.
    model = DecorrelatedNet(input_shape=(1,48,48), signals_list=["Acc_norm", "Gyr_y"], loss_coef=0.1,
                            plot_conflict=True, cca_type='deep')

    model.to(device)
    model.adapt_CCA(train_dataloader)

    _, _, _, val_F1 = model.train_process(train_dataloader, val_dataloader, maxepochs=10)
Code example #2
0
if __name__ == "__main__":
    """
        test the online (comp_preprocess_first=False) preprocessing
        load the Train Set
        apply the preprocess on the first 5 samples

    """

    print(
        '\n\n *** test the online (comp_preprocess_first=False) preprocessing *** \n'
    )

    n_classes = len(classes_names)
    # We will need this for the tests
    # comp_preprocess_first=False: "online" mode — samples are presumably
    # preprocessed at access time instead of all up front (see header string).
    DS = Datasets.SignalsDataSet(mode='train',
                                 split='balanced',
                                 comp_preprocess_first=False)

    flag_debug = True
    # Signals exercised by this visual check.
    example_signals = ["Acc_norm", "Gyr_y", "Mag_norm"]
    n_signals = len(example_signals)

    # ---------------------- temporal ----------------------------
    # Attach the time-domain transform, then read the dataset in batches of 5.
    temporal_transform = TemporalTransform(example_signals)
    DS.transform = temporal_transform
    dataloader = torch.utils.data.DataLoader(
        DS, batch_size=5)  # instances will be loaded 5 by 5

    plt.figure()

    #                        axis = time
Code example #3
0
    import sys
    sys.path.append("..")

import numpy as np
import torch
import scipy.signal, scipy.interpolate, scipy.ndimage


from param import classes_names, fs, duration_window, duration_overlap, duration_segment, spectro_batch_size
from preprocess import Datasets

if __name__ == "__main__":
    # matplotlib is only needed for the test visualizations below.
    import matplotlib.pyplot as plt
    n_classes = len(classes_names)
    # We will need this for the tests
    # Train split with on-access ("online") preprocessing, shared by the tests.
    DS = Datasets.SignalsDataSet(mode='train', split='balanced', comp_preprocess_first=False)


#%% transform functions

"""In all of the following functions, the input parameter (data) is, by default,
a dict of numpy arrays, with signal names (e.g. "Gyr_z") as keys and
1-dimensional arrays as values.

Most of this part contains basic visualizations to make sure the preprocessing is correct."""




class TemporalTransform():
    """  create the base transform to use to each element of the data
Code example #4
0
def create_dataloaders(split,
                       data_type,
                       fusion_type,
                       signals_list,
                       log_power="missing",
                       out_size="missing",
                       interpolation="missing",
                       comp_preprocess_first=True,
                       use_test=False):
    """
    Generate the training, validation, and (optionally) test sets with the
    given parameters, and return the corresponding dataloaders.

    Parameters
    ----------
    split : str
        dataset split name, forwarded to Datasets.SignalsDataSet
    data_type : str
        one of "temporal", "FFT", "spectrogram"
    fusion_type : str
        fusion strategy; selects the collate function (see body)
    signals_list : list
        signals to load; duplicates are removed before building the transform
    log_power, out_size, interpolation
        only mandatory when data_type == "spectrogram", and can be left
        ignored otherwise (the "missing" string is a deliberate sentinel)
    comp_preprocess_first : bool, default True
        whether to compute the whole preprocessing up front
    use_test : bool, default False
        if False, do not generate a test dataloader

    Returns
    -------
    train_dataloader, val_dataloader, test_dataloader
        tuple of torch.utils.data.DataLoader objects
        if use_test == False, test_dataloader is replaced with an empty list.

    Raises
    ------
    ValueError
        if data_type or fusion_type is not a recognized option (previously an
        unrecognized value surfaced later as a confusing NameError on
        transform_fn / collate_fn).
    """
    print("create_dataloaders", signals_list)

    if data_type in ["temporal", "FFT"]:
        if data_type == "temporal":
            transform_fn = transforms.TemporalTransform(
                remove_duplicates(signals_list))

        else:  # data_type == "FFT":
            transform_fn = transforms.FFTTransform(
                remove_duplicates(signals_list))

    elif data_type == "spectrogram":
        transform_fn = transforms.SpectrogramTransform(
            remove_duplicates(signals_list), fs, duration_window,
            duration_overlap, spectro_batch_size, interpolation, log_power,
            out_size)

    else:
        # Fail fast instead of leaving transform_fn unbound.
        raise ValueError(f"unknown data_type: {data_type!r}; "
                         "expected 'temporal', 'FFT', or 'spectrogram'")

    if fusion_type in ["time", "freq", "depth"]:
        collate_fn = fusion.ConcatCollate(fusion_type,
                                          list_signals=signals_list)
    elif fusion_type in [
            "probas", "scores", "weighted_probas", "weighted_scores", "GBlend",
            "learn2combine", "decorrelated_classic", "decorrelated_deep"
    ]:
        collate_fn = fusion.separate_sensors_collate
    elif fusion_type in [
            "features", "bottleneck", "attention", "selective_fusion"
    ]:
        collate_fn = fusion.ConcatCollate("depth", list_signals=signals_list)
        # a 'depth' collate can be used for feature concatenation (intermediate fusion)
        # thanks to the 'group' argument of convolutional layers
        # see the documentation of basic_CNN for complete explanations
    else:
        # Fail fast instead of leaving collate_fn unbound.
        raise ValueError(f"unknown fusion_type: {fusion_type!r}")

    train_dataset = Datasets.SignalsDataSet(
        mode='train',
        split=split,
        comp_preprocess_first=comp_preprocess_first,
        transform=transform_fn)
    val_dataset = Datasets.SignalsDataSet(
        mode='val',
        split=split,
        comp_preprocess_first=comp_preprocess_first,
        transform=transform_fn)

    if use_test:
        test_dataset = Datasets.SignalsDataSet(
            mode='test',
            split=split,
            comp_preprocess_first=comp_preprocess_first,
            transform=transform_fn)

    batch_size = 64 if fusion_type != 'decorrelated_deep' else 512  # we need full-rank correlation matrices estimation for deep CCA
    train_dataloader = torch.utils.data.DataLoader(train_dataset,
                                                   batch_size=batch_size,
                                                   collate_fn=collate_fn,
                                                   shuffle=True)
    # NOTE(review): shuffling the validation set only when use_test is True is
    # surprising — preserved as-is, but confirm this is intentional.
    val_dataloader = torch.utils.data.DataLoader(val_dataset,
                                                 batch_size=batch_size,
                                                 collate_fn=collate_fn,
                                                 shuffle=use_test)
    if use_test:
        test_dataloader = torch.utils.data.DataLoader(test_dataset,
                                                      batch_size=batch_size,
                                                      collate_fn=collate_fn,
                                                      shuffle=True)
    else:
        test_dataloader = []

    return train_dataloader, val_dataloader, test_dataloader