Example #1
0
    def test_fourier(self):
        """Check Fourier(freq=2).calc on two small keyed series.

        Builds an RDD of (key, ndarray) records and verifies the first and
        second components of the result for the first record against
        precomputed reference values.
        """
        data_local = [
            array([1.0, 2.0, -4.0, 5.0, 8.0, 3.0, 4.1, 0.9, 2.3]),
            array([2.0, 2.0, -4.0, 5.0, 3.1, 4.5, 8.2, 8.1, 9.1]),
        ]

        # list(...) so the zip result is a concrete sequence on Python 3,
        # where zip returns a lazy iterator.
        data = self.sc.parallelize(list(zip(range(1, 3), data_local)))

        vals = Fourier(2).calc(data)
        # lambda kv: kv[1][i] replaces the Python-2-only tuple-parameter
        # form `lambda (_, v): v[i]`, which is a SyntaxError on Python 3.
        assert(allclose(vals.map(lambda kv: kv[1][0]).collect()[0], 0.578664))
        assert(allclose(vals.map(lambda kv: kv[1][1]).collect()[0], 4.102501))
class FourierTest(ThunderDataTest):
    """Runs a Fourier(freq=5) transform over the shared test RDD."""

    def __init__(self, sc):
        """Initialize the base test with *sc* and configure the transform."""
        ThunderDataTest.__init__(self, sc)
        self.method = Fourier(freq=5)

    def runtest(self):
        """Apply the transform to self.rdd; count() forces evaluation."""
        self.method.calc(self.rdd).count()
Example #3
0
class FourierTest(ThunderDataTest):
    """Runs a Fourier(freq=5) transform over the shared test RDD."""

    def __init__(self, sc):
        """Initialize the base test with *sc* and configure the transform."""
        ThunderDataTest.__init__(self, sc)
        self.method = Fourier(freq=5)

    def runtest(self):
        """Apply the transform to self.rdd; count() forces evaluation."""
        self.method.calc(self.rdd).count()
Example #4
0
    def test_fourier(self):
        """Check Fourier(freq=2).calc on two small keyed series.

        Builds an RDD of (key, ndarray) records and verifies the first and
        second components of the result for the first record against
        precomputed reference values.
        """
        data_local = [
            array([1.0, 2.0, -4.0, 5.0, 8.0, 3.0, 4.1, 0.9, 2.3]),
            array([2.0, 2.0, -4.0, 5.0, 3.1, 4.5, 8.2, 8.1, 9.1]),
        ]

        # list(...) so the zip result is a concrete sequence on Python 3,
        # where zip returns a lazy iterator.
        data = self.sc.parallelize(list(zip(range(1, 3), data_local)))

        vals = Fourier(2).calc(data)
        # lambda kv: kv[1][i] replaces the Python-2-only tuple-parameter
        # form `lambda (_, v): v[i]`, which is a SyntaxError on Python 3.
        assert (allclose(vals.map(lambda kv: kv[1][0]).collect()[0], 0.578664))
        assert (allclose(vals.map(lambda kv: kv[1][1]).collect()[0], 4.102501))
 def __init__(self, sc):
     """Initialize the base test harness with *sc* (a SparkContext —
     presumably; confirm against ThunderDataTest) and configure a
     Fourier(freq=5) transform as the method under test."""
     ThunderDataTest.__init__(self, sc)
     self.method = Fourier(freq=5)
Example #6
0
 def __init__(self, sc):
     """Initialize the base test harness with *sc* (a SparkContext —
     presumably; confirm against ThunderDataTest) and configure a
     Fourier(freq=5) transform as the method under test."""
     ThunderDataTest.__init__(self, sc)
     self.method = Fourier(freq=5)
Example #7
0
import argparse
import glob
import os

from pyspark import SparkContext

from thunder.timeseries import Fourier
from thunder.utils import load
from thunder.utils import save

if __name__ == "__main__":
    # Command-line driver: fourier transform over each time series in a
    # dataset, saved in matlab format. Relies on argparse/glob/os, which
    # must be imported at the top of the file (they were missing).
    parser = argparse.ArgumentParser(
        description="compute a fourier transform on each time series")
    parser.add_argument("master", type=str)
    parser.add_argument("datafile", type=str)
    parser.add_argument("outputdir", type=str)
    parser.add_argument("freq", type=int)
    parser.add_argument("--preprocess",
                        choices=("raw", "dff", "dff-highpass", "sub"),
                        default="raw",
                        required=False)

    args = parser.parse_args()

    sc = SparkContext(args.master, "fourier")

    # On a real cluster, ship the packaged thunder egg to the workers so
    # the transform code is importable there.
    if args.master != "local":
        eggs = glob.glob(os.path.join(os.environ['THUNDER_EGG'], "*.egg"))
        sc.addPyFile(eggs[0])

    # cache() because calc() may traverse the loaded RDD more than once.
    records = load(sc, args.datafile, args.preprocess).cache()
    result = Fourier(freq=args.freq).calc(records)

    outputdir = args.outputdir + "-fourier"
    save(result, outputdir, "fourier", "matlab")