Example #1
0
	def createTimeseries(self, usernames, ts_start, ts_end, ts_window):
		"""Collect the timestamps of every profile entry belonging to the
		given users into a single Timeseries over [ts_start, ts_end] with
		the given window. Users missing from self.profiles are skipped."""
		series = Timeseries(ts_start, ts_end, ts_window)
		for name in usernames:
			for entry in self.profiles.get(name, []):
				series.addTimestamp(entry.timestamp)
		return series
Example #2
0
    def create(self):
        """
        Returns data in format {"columns": } used in UI.

        Runs every query in ``self.queries`` against GNIP as a count
        timeline, appends each resulting series to ``columns`` (the shared
        x-axis is inserted once, at the front), and accumulates the running
        total in ``self.total``.

        Returns:
            list: ``[x_axis, series_1, series_2, ...]`` for the UI.
        """
        # New gnip client with fresh endpoint
        g = get_gnip(True)

        columns = []
        for q in self.queries:
            # Counts-only timeline request (max_results=0, bucketed counts).
            timeline = g.query_api(pt_filter=str(q),
                                   max_results=0,
                                   use_case="timeline",
                                   start=self.start.strftime(self.DATE_FORMAT),
                                   end=self.end.strftime(self.DATE_FORMAT),
                                   count_bucket=self.interval,
                                   csv_flag=False)

            # Process timeseries on the GNIP Data
            time_series_data = Timeseries(q, timeline, columns, self.total,
                                          self.x_axis)

            # The first processed query establishes the shared x-axis.
            if self.x_axis is None:
                self.x_axis = time_series_data.xAxis
                columns.insert(0, self.x_axis)

            columns.append(time_series_data.series)
            self.total = time_series_data.total

        return columns
Example #3
0
    def create(self):
        """
        Returns data in format {"columns": } used in UI.

        Runs every query in ``self.queries`` against GNIP as a count
        timeline. A query that raises GNIPQueryError is logged and then
        processed with ``timeline=None`` (best-effort, no abort).

        Returns:
            list: ``[x_axis, series_1, series_2, ...]`` for the UI.
        """
        # New gnip client with fresh endpoint
        g = GnipSearchAPI(settings.GNIP_USERNAME,
                          settings.GNIP_PASSWORD,
                          settings.GNIP_SEARCH_ENDPOINT,
                          paged=True)
        columns = []
        for q in self.queries:
            timeline = None
            try:
                # Counts-only timeline request (max_results=0, bucketed counts).
                timeline = g.query_api(pt_filter=str(q),
                                       max_results=0,
                                       use_case="timeline",
                                       start=self.start.strftime(self.DATE_FORMAT),
                                       end=self.end.strftime(self.DATE_FORMAT),
                                       count_bucket=self.interval,
                                       csv_flag=False)
            except GNIPQueryError as e:
                # Best-effort: keep going with timeline=None for this query.
                print(e)

            # Process timeseries on the GNIP Data
            time_series_data = Timeseries(q, timeline, columns, self.total,
                                          self.x_axis)

            # The first processed query establishes the shared x-axis.
            if self.x_axis is None:
                self.x_axis = time_series_data.xAxis
                columns.insert(0, self.x_axis)

            columns.append(time_series_data.series)
            self.total = time_series_data.total

        return columns
Example #4
0
def stand(x, m, s):
    '''Standardize timeseries x using mean m and standard deviation s

    Args:
    x: Timeseries being standardized
    m: Mean subtracted from the values (the original series' mean)
    s: Standard deviation the values are divided by (the original
       series' std deviation)

    Output:
    A timeseries whose values are (x - m) / s; when m and s are x's own
    mean and std deviation, the result has mean 0 and std deviation 1
    '''
    standardized = (np.array(list(iter(x))) - m) / s
    return ts.TimeSeries(standardized, list(x.itertimes()))
Example #5
0
    def create(self):
        """
        Returns data in format {"columns": } used in UI.

        Runs every query in ``self.queries`` through the GNIP request
        wrapper. Window metadata (start/end/days) is recorded once, from
        the first successful query. A query that raises GNIPQueryError is
        logged and then processed with ``timeline=None`` (best-effort).

        Returns:
            dict: {"columns": [...], "total": int, "start": ...,
                   "end": ..., "days": ...} (window keys only present
                   when at least one query succeeded).
        """
        data = {}
        columns = []
        for q in self.queries:

            timeline = None
            try:
                request = requests.GNIP(request=self.request, query=q)
                timeline = request.get_timeline()
                # Record window metadata once, from the first success.
                if 'start' not in data:
                    data['start'] = request.start.strftime(
                        self.DATE_FORMAT_JSON)
                if 'end' not in data:
                    data['end'] = request.end.strftime(self.DATE_FORMAT_JSON)
                if 'days' not in data:
                    data['days'] = request.days

            except GNIPQueryError as e:
                # Best-effort: keep going with timeline=None for this query.
                print(e)

            # Process timeseries on the GNIP Data
            time_series_data = Timeseries(q, timeline, columns, self.total,
                                          self.x_axis)

            # The first processed query establishes the shared x-axis.
            if self.x_axis is None:
                self.x_axis = time_series_data.xAxis
                columns.insert(0, self.x_axis)

            columns.append(time_series_data.series)
            self.total = time_series_data.total

        data['columns'] = columns
        data['total'] = self.total
        return data
Example #6
0
#!/usr/bin/python
# Extracts useful time series from the gsfc ice extent/area data.
# Must be given that data in stdin.

import sys
from timeseries import Timeseries
from datetime import datetime

# Parse the whitespace-separated gsfc data from stdin into a Timeseries
# keyed by date.
ts = Timeseries()
first = True
for line in sys.stdin:
    if first:
        # Ignore header line
        first = False
        continue
    values = line.split()
    # Columns: 0=year, 1=month, 3=value (column 2 is ignored here).
    (year, month, value) = [int(values[x]) for x in (0, 1, 3)]
    # Use mid-month (the 15th) as the representative date for the month.
    d = datetime(year, month, 15)
    # Scale the raw value down to millions -- presumably km^2 to
    # million km^2; TODO confirm units against the data source.
    ts[d] = value/1e6

def year_to_bounds(y):
    # Half-open bounds for calendar year y as "YYYY-01-01 00:00" strings:
    # start of year y and start of year y+1, zero-padded to 4 digits.
    start = "%04d-01-01 00:00" % y
    end = "%04d-01-01 00:00" % (y + 1)
    return start, end

# First and last calendar years present in the parsed data (inclusive).
(start_year, end_year) = [x.year for x in ts.bounding_dates()]

print "Minimums"
for y in range(start_year, end_year+1):
    s, e = year_to_bounds(y)
    # Minimum value within calendar year y.
    print "%04d %f" % (y, ts.min(s, e))
Example #7
0
from tree import Node_of_tree
from anytree.search import findall
from anytree import RenderTree
import numpy as np
import math

print("#################")
print("##### INIT. #####")
print("#################")

# generate some timeseries
# S0-S1 with sine waves
# S2-S7 with sine waves, moved by 180° (phase offset of pi)
np.random.seed(seed=42)  # fixed seed so the generated series are reproducible
series = {}
series['S0'] = Timeseries('S0', SinGenerator())
series['S1'] = Timeseries('S1', SinGenerator())
series['S2'] = Timeseries('S2', SinGenerator(start=math.pi))
series['S3'] = Timeseries('S3', SinGenerator(start=math.pi))
series['S4'] = Timeseries('S4', SinGenerator(start=math.pi))
series['S5'] = Timeseries('S5', SinGenerator(start=math.pi))
series['S6'] = Timeseries('S6', SinGenerator(start=math.pi))
series['S7'] = Timeseries('S7', SinGenerator(start=math.pi))

# Initialize root node of the tree (the initial cluster) and set
# timeseries to initial cluster
root_node = Node_of_tree('root_node')
root_node.set_cluster_timeseries(series)

# initial run: let the tree grow to cluster the timeseries
# for each active cluster - get next value for each series
'''

if __name__ == "__main__":

    # Pick 20 distinct random indices into the 1000 available ts files;
    # these become the vantage points.
    indexes = np.random.choice(1000, 20, replace=False)
    vantagePtList = []
    dbList = []

    # Create TS referencing
    # the 20 randomly selected vantage-point files
    for j in range(20):
        fileName = 'tsdata/ts' + str(indexes[j]) + '.dat'
        dbName = "tsdb/db" + str(j) + ".dbdb"
        # File layout: column 0 = times, column 1 = values.
        x = np.loadtxt(fileName, delimiter=' ')
        vantagePt = ts.TimeSeries(x[:, 1], x[:, 0])
        vantagePtList.append(vantagePt)
        ## Remove DB if it has previously been created
        if os.path.exists(dbName):
            os.remove(dbName)

        # Connect to databases (one fresh DB per vantage point)
        db = lab10.connect(dbName)
        dbList.append(db)

    # For all 20 databases
    # loop through 1000 timeseries,
    # add Key = Distance(vantagePt, comparePt)
    # Value = comparePt's fileName
    for i in range(1000):
        fileName = 'tsdata/ts' + str(i) + '.dat'
Example #9
0
def test_kernel_corr():
    """Two phase-shifted copies of the same bump should have kernelized
    correlation very close to 1 with a wide kernel (sigma=10)."""
    vals_a = [0, 1, 2, 3, 4, 3, 2, 1, 0]
    vals_b = [1, 2, 3, 4, 3, 2, 1, 0, 0]
    ts1 = ts.TimeSeries(vals_a, list(range(1, 10)))
    ts2 = ts.TimeSeries(vals_b, list(range(1, 10)))
    assert abs(kernel_corr(ts1, ts2, 10) - 1) < 1e-3
Example #10
0
def test_maxccor():
    """Max cross-correlation of a bump with its shifted copy is ~1 at
    the best phase offset."""
    vals_a = [0, 1, 2, 3, 4, 3, 2, 1, 0]
    vals_b = [1, 2, 3, 4, 3, 2, 1, 0, 0]
    ts1 = ts.TimeSeries(vals_a, list(range(1, 10)))
    ts2 = ts.TimeSeries(vals_b, list(range(1, 10)))
    assert abs(max_corr_at_phase(ts1, ts2)[1] - 1.0) < 1e-5
Example #11
0
def test_self_maxccor():
    """A series cross-correlated with itself peaks at exactly 1.0."""
    series = ts.TimeSeries([100, 101, 102, 103], [1, 2, 3, 4])
    assert max_corr_at_phase(series, series)[1] == 1.0
Example #12
0
def random_ts(a):
    """Return a TimeSeries of 100 uniform random values scaled by a,
    sampled on the regular grid t = 0.00, 0.01, ..., 0.99."""
    times = np.arange(0.0, 1.0, 0.01)
    values = a * np.random.random(100)
    return ts.TimeSeries(values, times)
Example #13
0
def tsmaker(m, s, j):
    """Return a TimeSeries shaped like a normal pdf with mean m and std s,
    plus j-scaled Gaussian noise, on the grid t = 0.00, 0.01, ..., 0.99."""
    times = np.arange(0.0, 1.0, 0.01)
    values = norm.pdf(times, m, s) + j * np.random.randn(100)
    return ts.TimeSeries(values, times)
Example #14
0
 def arima_call(self, dataset, output, next_forecast, queue):
     """Run the Timeseries pipeline on dataset (execute writes to output /
     produces next_forecast) and hand the finished object back via queue,
     so this can run in a worker process/thread."""
     series = Timeseries(dataset)
     series.execute(output, next_forecast)
     print()
     queue.put(series)
Example #15
0
Returns (Prints):
#### Nearest Timeseries ####
tsdata/ts398.dat


Requires folder:
tsdb

'''

if __name__ == "__main__":
    # Load in the TS to evaluate; file layout: column 0 = times,
    # column 1 = values.
    filename = sys.argv[1]
    x = np.loadtxt(filename, delimiter=' ')
    origTs = ts.TimeSeries(x[:,1],x[:,0])
    # Resample onto the standard 100-point grid on [0, 1).
    time = np.arange(0.0, 1.0, 0.01)
    testTs = origTs.interpolate(time)

    # Find the nearest vantagePt across the 20 vantage-point databases.
    minDist = float('inf')
    for j in range(20):
        dbName = "tsdb/db"+str(j)+".dbdb"
        db = lab10.connect(dbName)
        # Key 0 holds the vantage point's source filename.
        vantagePtFile = db.get(0)
        x = np.loadtxt(vantagePtFile, delimiter=' ')
        # NOTE(review): comparePt is not interpolated here -- presumably
        # the vantage files are already on the standard grid; confirm.
        comparePt = ts.TimeSeries(x[:,1],x[:,0])
        # Kernel-correlation distance: 2 * (1 - corr).
        dist = 2*(1-ss.kernel_corr(comparePt,testTs))
        if dist < minDist:
            minDist = dist
            minDbName = dbName
Example #16
0
def test_stand():
    """Standardizing a series by its own mean/std yields unit std dev."""
    raw = [100, 101, 102, 103]
    series = ts.TimeSeries(raw, [1, 2, 3, 4])
    standardized = stand(series, np.mean(raw), np.std(raw))
    assert np.std(list(iter(standardized))) == 1.0
Example #17
0
# Module-level application/service setup (runs once at import time).
application = Flask(__name__)

# init firestore db from the local service-account key file
cred = credentials.Certificate('key.json')
default_app = initialize_app(cred)
db = firestore.client()

# collection references in firebase (created lazily on first write)
wmtr_ref = db.collection('worldmeter-aws')
bno_ref = db.collection('bno')

# init worldmeter source (scraper/client -- see WorldMeter)
wmtr = WorldMeter()

# init timeseries accumulator
ts = Timeseries()


# function to update database
@application.route('/update', methods=['POST'])
def update_db():
    try:
        with open('api.key') as infile:
            api_key = infile.readline().replace('\n', '').strip()
            if api_key == request.args.get('x-api-key'):
                wmtr.fetch()
                wmtr.parse()
                timestamp = str(datetime.utcnow())
                data = wmtr.get_data()
                for d in data:
                    country = d['country']