Example #1
 def loadinputdata(self, datafile, savefile=None):
     """Load the input data into an RDD and record the related file paths."""
     rdd = load(self.sc, datafile, preprocessmethod="dff-percentile")
     self.rdd = rdd
     self.datafile = datafile
     if savefile is not None:
         self.savefile = savefile
     # point modelfile at the 'stim' entry in the same directory as the data file
     self.modelfile = os.path.join(os.path.split(self.datafile)[0], 'stim')
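The last line builds a companion path in the same directory as the input file. A quick, self-contained illustration of that `os.path` idiom; the path below is a made-up placeholder, not one used by the project:

    import os

    datafile = "/data/session1/series.txt"   # placeholder path for illustration
    # os.path.split(...)[0] gives the directory, so this points at a sibling entry named 'stim'
    modelfile = os.path.join(os.path.split(datafile)[0], 'stim')
    print(modelfile)   # on a POSIX system this prints /data/session1/stim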
Example #2
 def test_ica(self):
     # run ICA on a small test dataset and compare against stored MATLAB reference results
     ica_data = os.path.join(DATA_DIR, "ica.txt")
     ica_results = os.path.join(DATA_DIR, "results/ica")
     data = load(self.sc, ica_data, "raw")
     w, sigs = ica(data, 4, 4, svdmethod="direct", seed=1)
     w_true = loadmat(os.path.join(ica_results, "w.mat"))["w"]
     sigs_true = loadmat(os.path.join(ica_results, "sigs.mat"))["sigs"]
     tol = 10e-02  # absolute tolerance of 0.1
     assert allclose(w, w_true, atol=tol)
     assert allclose(transpose(sigs.map(lambda kv: kv[1]).collect()), sigs_true, atol=tol)
Example #3
 def test_ica(self):
     ica_data = os.path.join(DATA_DIR, "ica.txt")
     ica_results = os.path.join(DATA_DIR, "results/ica")
     data = load(self.sc, ica_data, "raw")
     w, sigs = ica(data, 4, 4, svdmethod="direct", seed=1)
     w_true = loadmat(os.path.join(ica_results, "w.mat"))["w"]
     sigs_true = loadmat(os.path.join(ica_results, "sigs.mat"))["sigs"]
     tol = 10e-02
     assert (allclose(w, w_true, atol=tol))
     assert (allclose(transpose(sigs.map(lambda kv: kv[1]).collect()),
                      sigs_true,
                      atol=tol))
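Both versions of the test compare against stored MATLAB results using NumPy's `allclose` with an absolute tolerance. A small self-contained illustration of that comparison; the arrays here are invented for the example and are not the test data:

    from numpy import array, allclose, transpose

    tol = 10e-02   # the same literal as in the tests: 10e-02 == 0.1

    estimated = array([[1.02, -0.49], [0.51, 0.98]])
    reference = array([[1.00, -0.50], [0.50, 1.00]])

    # element-wise differences must stay within the tolerance
    assert allclose(estimated, reference, atol=tol)

    # transpose is used in the tests to turn the list of collected rows into columns
    assert transpose(estimated).shape == (2, 2)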
Example #4
    method = SigProcessingMethod.load("stats", statistic=statistic)
    vals = method.calc(data)

    return vals


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="compute summary statistics on time series data")
    parser.add_argument("master", type=str)
    parser.add_argument("datafile", type=str)
    parser.add_argument("outputdir", type=str)
    parser.add_argument("mode", choices=("mean", "median", "std", "norm"), help="which summary statistic")
    parser.add_argument("--preprocess", choices=("raw", "dff", "dff-highpass", "sub"), default="raw", required=False)

    args = parser.parse_args()
    # ship the packaged Thunder egg to the workers via pyFiles
    egg = glob.glob(os.path.join(os.environ["THUNDER_EGG"], "*.egg"))
    sc = SparkContext(args.master, "ref", pyFiles=egg)

    data = load(sc, args.datafile, args.preprocess).cache()

    vals = stats(data, args.mode)

    outputdir = args.outputdir + "-stats"
    if not os.path.exists(outputdir):
        os.makedirs(outputdir)

    save(vals, outputdir, "stats_" + args.mode, "matlab")
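To make the command-line interface concrete, here is a hedged, self-contained sketch of how the parser defined above turns an argument list into `args`; the argument values are placeholders for illustration, not commands from the original script:

    import argparse

    parser = argparse.ArgumentParser(description="compute summary statistics on time series data")
    parser.add_argument("master", type=str)
    parser.add_argument("datafile", type=str)
    parser.add_argument("outputdir", type=str)
    parser.add_argument("mode", choices=("mean", "median", "std", "norm"))
    parser.add_argument("--preprocess", choices=("raw", "dff", "dff-highpass", "sub"), default="raw")

    # placeholder argument list; in the real script these come from sys.argv
    args = parser.parse_args(["local", "data/series.txt", "results", "mean", "--preprocess", "dff"])
    print(args.mode)                   # mean
    print(args.preprocess)             # dff
    print(args.outputdir + "-stats")   # results-stats, the directory the script writes into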
Example #5
    return corr


if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description="correlate time series with neighbors")
    parser.add_argument("master", type=str)
    parser.add_argument("datafile", type=str)
    parser.add_argument("outputdir", type=str)
    parser.add_argument("sz", type=int)
    parser.add_argument("--preprocess",
                        choices=("raw", "dff", "dff-highpass", "sub"),
                        default="raw",
                        required=False)

    args = parser.parse_args()

    sc = SparkContext(args.master, "localcorr")

    if args.master != "local":
        egg = glob.glob(os.path.join(os.environ['THUNDER_EGG'], "*.egg"))
        sc.addPyFile(egg[0])

    data = load(sc, args.datafile, args.preprocess).cache()

    corrs = localcorr(data, args.sz)

    outputdir = args.outputdir + "-localcorr"

    save(corrs, outputdir, "corr", "matlab")
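The conditional `addPyFile` block above is a recurring pattern in these scripts. Below is a small hedged helper capturing it; the function name is invented for illustration, and it assumes a live SparkContext plus a THUNDER_EGG environment variable pointing at the directory containing the built egg:

    import glob
    import os

    def add_project_egg(sc, master):
        """Ship the packaged egg to the Spark workers unless running in local mode.

        Mirrors the pattern used in the scripts above; does nothing if no egg is found.
        """
        if master != "local":
            eggs = glob.glob(os.path.join(os.environ["THUNDER_EGG"], "*.egg"))
            if eggs:
                sc.addPyFile(eggs[0])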
Example #6
    parser.add_argument("--tuningmode", choices=("circular", "gaussian"), default="gaussian", required=False, help="form of tuning curve")
    parser.add_argument("--basename", type=str, default="-", required=False)
    parser.add_argument("--stim", type=str, default="-", required=False)

    args = parser.parse_args()

    sc = SparkContext(args.master, "myscript")

    if args.master != "local":
        egg = glob.glob(os.path.join(os.environ['THUNDER_EGG'], "*.egg"))
        sc.addPyFile(egg[0])
    
    # load data file
    datafile = os.path.join(args.datafolder, args.imagename)
    outputdir = os.path.join(args.datafolder, "spark")
    data = load(sc, datafile, args.preprocess, 4)

    # keep only the first three components of each key
    data = data.map(lambda kv: (kv[0][0:3], kv[1]))
    data.cache()

    # compute mean map
    vals = stats(data, "mean")
    save(vals, outputdir, "mean_vals", "matlab")

    # compute local correlation
    if args.neighbourhood != 0:
        cor = localcorr(data, args.neighbourhood)
        save(cor, outputdir, "local_corr", "matlab")

    # if stim argument is not default