def create(self):
    """Build chart data in the ``{"columns": ...}`` shape used by the UI.

    Runs one GNIP timeline query per entry in ``self.queries``, wraps each
    result in a ``Timeseries``, and accumulates the series lists.

    Returns:
        list: the shared x-axis list (inserted once at index 0) followed by
        one series list per query.

    Side effects:
        Sets ``self.x_axis`` on the first successful query and updates
        ``self.total`` as series are processed.
    """
    # New gnip client with fresh endpoint
    g = GnipSearchAPI(settings.GNIP_USERNAME,
                      settings.GNIP_PASSWORD,
                      settings.GNIP_SEARCH_ENDPOINT,
                      paged=True)
    columns = []
    for q in self.queries:
        timeline = None
        try:
            timeline = g.query_api(pt_filter=str(q),
                                   max_results=0,
                                   use_case="timeline",
                                   start=self.start.strftime(self.DATE_FORMAT),
                                   end=self.end.strftime(self.DATE_FORMAT),
                                   count_bucket=self.interval,
                                   csv_flag=False)
        except GNIPQueryError as e:
            # Best-effort: a failed query leaves timeline as None; the
            # Timeseries wrapper below is given the None to handle.
            print(e)
        # Process timeseries on the GNIP Data
        time_series_data = Timeseries(q, timeline, columns,
                                      self.total, self.x_axis)
        if self.x_axis is None:
            # Capture the shared x-axis from the first processed query only.
            self.x_axis = time_series_data.xAxis
            columns.insert(0, self.x_axis)
        columns.append(time_series_data.series)
        self.total = time_series_data.total
    return columns
def create(self):
    """Build chart data in the ``{"columns": ...}`` shape used by the UI.

    Runs one GNIP timeline query per entry in ``self.queries`` using a
    fresh paged client and accumulates the resulting series.

    Returns:
        list: the shared x-axis list (inserted once at index 0) followed by
        one series list per query.

    Side effects:
        Sets ``self.x_axis`` on the first query and updates ``self.total``.
    """
    # New gnip client with fresh endpoint
    g = get_gnip(True)
    columns = []
    for q in self.queries:
        timeline = g.query_api(pt_filter=str(q),
                               max_results=0,
                               use_case="timeline",
                               start=self.start.strftime(self.DATE_FORMAT),
                               end=self.end.strftime(self.DATE_FORMAT),
                               count_bucket=self.interval,
                               csv_flag=False)
        # Process timeseries on the GNIP Data
        time_series_data = Timeseries(q, timeline, columns,
                                      self.total, self.x_axis)
        if self.x_axis is None:
            # Capture the shared x-axis from the first processed query only.
            self.x_axis = time_series_data.xAxis
            columns.insert(0, self.x_axis)
        columns.append(time_series_data.series)
        self.total = time_series_data.total
    return columns
def create(self):
    """Build chart data in the ``{"columns": ...}`` shape used by the UI.

    Runs one GNIP timeline request per entry in ``self.queries`` and
    collects the series plus window metadata.

    Returns:
        dict: keys ``columns`` and ``total``, plus ``start``/``end``/``days``
        taken from the first successful request.

    Side effects:
        Sets ``self.x_axis`` on the first query and updates ``self.total``.
    """
    data = {}
    columns = []
    for q in self.queries:
        timeline = None
        try:
            request = requests.GNIP(request=self.request, query=q)
            timeline = request.get_timeline()
            # Record the query window metadata once, from the first
            # request that succeeds; later iterations leave it untouched.
            data.setdefault('start',
                            request.start.strftime(self.DATE_FORMAT_JSON))
            data.setdefault('end',
                            request.end.strftime(self.DATE_FORMAT_JSON))
            data.setdefault('days', request.days)
        except GNIPQueryError as e:
            # Best-effort: a failed query leaves timeline as None; the
            # Timeseries wrapper below is given the None to handle.
            print(e)
        # Process timeseries on the GNIP Data
        time_series_data = Timeseries(q, timeline, columns,
                                      self.total, self.x_axis)
        if self.x_axis is None:
            # Capture the shared x-axis from the first processed query only.
            self.x_axis = time_series_data.xAxis
            columns.insert(0, self.x_axis)
        columns.append(time_series_data.series)
        self.total = time_series_data.total
    data['columns'] = columns
    data['total'] = self.total
    return data
from tree import Node_of_tree
from anytree.search import findall
from anytree import RenderTree
import numpy as np
import math

print("#################")
print("##### INIT. #####")
print("#################")

# generate some timeseries
# S0-S1 with sine waves
# S2-S7 with sine waves, moved by 180 degrees
np.random.seed(seed=42)

# Build the series dict with loops instead of six copy-pasted lines;
# construction order (S0..S7) is preserved so any random state consumed
# by SinGenerator is drawn in the same sequence.
series = {}
for name in ('S0', 'S1'):
    series[name] = Timeseries(name, SinGenerator())
for name in ('S2', 'S3', 'S4', 'S5', 'S6', 'S7'):
    # start=math.pi shifts the generated sine wave by half a period.
    series[name] = Timeseries(name, SinGenerator(start=math.pi))

# Initialize root node of the tree (the initial cluster) and set
# timeseries to initial cluster
root_node = Node_of_tree('root_node')
root_node.set_cluster_timeseries(series)

# initial run: let the tree grow to cluster the timeseries
# for each active cluster - get next value for each series
def arima_call(self, dataset, output, next_forecast, queue):
    """Fit/execute a Timeseries model on *dataset* and hand it back via *queue*.

    Intended as a worker target: the finished model is communicated
    through the queue rather than a return value.
    """
    model = Timeseries(dataset)
    model.execute(output, next_forecast)
    # Blank line separates this worker's console output from others.
    print()
    queue.put(model)
application = Flask(__name__) # init firestore db cred = credentials.Certificate('key.json') default_app = initialize_app(cred) db = firestore.client() # create collections in firebase wmtr_ref = db.collection('worldmeter-aws') bno_ref = db.collection('bno') # init worldmeter source wmtr = WorldMeter() # init timeseries ts = Timeseries() # function to update database @application.route('/update', methods=['POST']) def update_db(): try: with open('api.key') as infile: api_key = infile.readline().replace('\n', '').strip() if api_key == request.args.get('x-api-key'): wmtr.fetch() wmtr.parse() timestamp = str(datetime.utcnow()) data = wmtr.get_data() for d in data: country = d['country']