Example #1
                    f,
                    datainfo.datafiles[0],
                    sensor,
                    nclusters,
                    globalc=args.globalclust)
                data = datainfo.get_peaks_resample_PCA(f,
                                                       datainfo.datafiles[0],
                                                       sensor)
                for i in range(nclusters):
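                    # Note: np.std returns the standard deviation, despite the variable name 'variance'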
                    variance[i] = np.std(data[labels == i], axis=0)

            lsignals = []

            # One row per batch of data files: fraction of peaks assigned to each cluster
            mhisto = np.zeros((len(datainfo.datafiles) // batches, nclusters))
            cbatch = list(enumerate(datainfo.datafiles))
            lbatch = batchify(cbatch, batches)
            for nf, btch in enumerate(lbatch):
                npeaks = 0
                for _, dfile in btch:

                    labels = datainfo.compute_peaks_labels(
                        f, dfile, sensor, nclusters, globalc=args.globalclust)
                    npeaks += len(labels)
                    for i in labels:
                        mhisto[nf, i] += 1.0
                mhisto[nf] /= npeaks

            matplotlib.rcParams.update({'font.size': 20})
            fig = plt.figure()
            fig.suptitle(sensorname, fontsize=50)
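Both histogram examples rely on a batchify helper that is not shown here. A minimal sketch of what such a helper could look like, assuming its second argument is a chunk size (which is what the len(datainfo.datafiles) // batches row count above suggests), purely for illustration:

def batchify(items, size):
    # Hypothetical re-implementation: split items into consecutive chunks of
    # `size` elements; the last chunk may be shorter. The helper actually used
    # by these examples may behave differently (e.g. a fixed number of chunks
    # instead of a fixed chunk size).
    return [items[i:i + size] for i in range(0, len(items), size)]

# e.g. batchify(list(enumerate(['f1', 'f2', 'f3'])), 2)
# -> [[(0, 'f1'), (1, 'f2')], [(2, 'f3')]]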
Example #2
        f = datainfo.open_experiment_data(mode='r')

        for sensor, nclusters in zip(datainfo.sensors, datainfo.clusters):
            print(sensor, nclusters)

            if args.globalclust:
                centroids = datainfo.get_peaks_global_clustering_centroids(f, sensor, nclusters)
            else:
                centroids = datainfo.get_peaks_clustering_centroids(f, datainfo.datafiles[0], sensor, nclusters)

            lsignals = []

            # One row per batch of data files: fraction of peaks assigned to each cluster
            mhisto = np.zeros((len(datainfo.datafiles) // batches, nclusters))
            cbatch = list(enumerate(datainfo.datafiles))
            lbatch = batchify(cbatch, batches)
            for nf, btch in enumerate(lbatch):
                npeaks = 0
                for _, dfile in btch:

                    labels = datainfo.compute_peaks_labels(f, dfile, sensor, nclusters, globalc=args.globalclust)
                    npeaks += len(labels)
                    for i in labels:
                        mhisto[nf, i] += 1.0
                mhisto[nf] /= npeaks

            matplotlib.rcParams.update({'font.size': 25})
            fig = plt.figure()
            fig.set_figwidth(24)
            fig.set_figheight(12)
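Both Example #1 and Example #2 stop right after creating the figure, so the actual plotting is not visible. A self-contained sketch, with synthetic data standing in for mhisto, of one way a per-batch cluster-frequency matrix can be drawn as stacked bars (the real code may render it differently):

import numpy as np
import matplotlib.pyplot as plt

# Synthetic stand-in for mhisto: 6 batches x 4 clusters, each row sums to 1
rng = np.random.default_rng(0)
mhisto = rng.random((6, 4))
mhisto /= mhisto.sum(axis=1, keepdims=True)

fig, ax = plt.subplots(figsize=(24, 12))
bottom = np.zeros(mhisto.shape[0])
for c in range(mhisto.shape[1]):
    # Stack each cluster's fraction on top of the previous ones
    ax.bar(np.arange(mhisto.shape[0]), mhisto[:, c], bottom=bottom,
           label=f'cluster {c}')
    bottom += mhisto[:, c]
ax.set_xlabel('batch of data files')
ax.set_ylabel('fraction of peaks')
ax.legend()
plt.show()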
Example #3
        lexperiments = ['e150514']
        mbasal = 'meanfirst'  # 'alternative'
        altsmooth = False
        args.wavy = True
        args.extra = True

    print('Begin Smoothing: ', time.ctime())
    for expname in lexperiments:
        datainfo = experiments[expname]

        if not args.extra:
            lsensors = datainfo.sensors
        else:
            lsensors = datainfo.extrasensors

        batches = batchify(list(product(datainfo.datafiles, lsensors)), njobs)

        if 'recenter' in datainfo.peaks_smooth:
            # If recenter is True, a subwindow of the data has to be specified so that the signal can be re-cropped
            recenter = datainfo.peaks_smooth['recenter']
            wtsel = datainfo.peaks_smooth['wtsel']
        else:
            recenter = False
            wtsel = None


        for batch in batches:
            # Parallelize the per-(datafile, sensor) smoothing jobs, forwarding
            # the recenter/wtsel values computed above
            res = Parallel(n_jobs=-1)(
                delayed(do_the_job)(dfile, sensor, recenter=recenter,
                                    wtsel=wtsel, clean=False, mbasal=mbasal,
                                    alt_smooth=altsmooth, wavy=args.wavy)
                for dfile, sensor in batch)
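The res = Parallel(n_jobs=-1)(delayed(...)(...) for ... in batch) line is joblib's standard fan-out idiom: delayed wraps the function and its arguments into a task, and Parallel runs the tasks on all available cores, returning results in generator order. A minimal, self-contained illustration with a toy function in place of do_the_job:

from joblib import Parallel, delayed

def toy_job(dfile, sensor, wavy=False):
    # Stand-in for do_the_job: just label the work unit it received
    return f'{dfile}/{sensor}' + ('-wavy' if wavy else '')

batch = [('data1', 'sensorA'), ('data1', 'sensorB'), ('data2', 'sensorA')]

# One worker per core; results come back in the order of the generator
res = Parallel(n_jobs=-1)(
    delayed(toy_job)(dfile, sensor, wavy=True) for dfile, sensor in batch)
print(res)  # ['data1/sensorA-wavy', 'data1/sensorB-wavy', 'data2/sensorA-wavy']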
Example #4
        mbasal = 'meanfirst'  # 'globalmeanfirst' # 'alternative'
        args.altsmooth = False
        args.wavy = False
        args.extra = False
        args.pca = 0.98

    print('Begin Smoothing: ', time.ctime())
    for expname in lexperiments:
        datainfo = experiments[expname]

        if not args.extra:
            lsensors = datainfo.sensors
        else:
            lsensors = datainfo.extrasensors

        batches = batchify(list(product(datainfo.datafiles, lsensors)), njobs)

        if 'recenter' in datainfo.peaks_smooth:
            # If recenter is True, a subwindow of the data has to be specified so that the signal can be re-cropped
            recenter = datainfo.peaks_smooth['recenter']
            wtsel = datainfo.peaks_smooth['wtsel']
        else:
            recenter = False
            wtsel = None

        for batch in batches:
            # Parallelize the per-(datafile, sensor) smoothing/PCA jobs,
            # forwarding the recenter/wtsel values computed above
            res = Parallel(n_jobs=-1)(
                delayed(do_the_job)(dfile,
                                    sensor,
                                    recenter=recenter,
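Example #4 additionally sets args.pca = 0.98. Assuming that value is the fraction of variance the PCA-based resampling/smoothing should preserve (the same kind of representation get_peaks_resample_PCA returns in Example #1), a minimal sketch with scikit-learn, whose PCA accepts a float n_components with exactly that meaning:

import numpy as np
from sklearn.decomposition import PCA

# Synthetic stand-in for a matrix of peak windows, one peak per row
rng = np.random.default_rng(0)
peaks = rng.normal(size=(500, 120))

# Keep as many components as needed to explain 98% of the variance,
# then project back to the original space as a smoothed reconstruction
pca = PCA(n_components=0.98)
smoothed = pca.inverse_transform(pca.fit_transform(peaks))
print(pca.n_components_, smoothed.shape)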