def _init(self, invstmtrs):
    """Parse an INVSTMTRS aggregate into statement attributes.

    Extracts the as-of datetime, transaction list, position list and
    balances, then strips subaggregates this parser does not support.
    """
    dtasof = invstmtrs.find('DTASOF').text
    self.datetime = types.DateTime.convert(dtasof)
    # INVTRANLIST
    tranlist = invstmtrs.find('INVTRANLIST')
    if tranlist is not None:
        self.transactions = INVTRANLIST(tranlist)
    # INVPOSLIST
    poslist = invstmtrs.find('INVPOSLIST')
    if poslist is not None:
        self.positions = [Aggregate.from_etree(pos) for pos in poslist]
    # INVBAL
    invbal = invstmtrs.find('INVBAL')
    if invbal is not None:
        # First strip off BALLIST & process it
        ballist = invbal.find('BALLIST')
        if ballist is not None:
            invbal.remove(ballist)
            self.other_balances = [Aggregate.from_etree(bal)
                                   for bal in ballist]
        # Now we can flatten the rest of INVBAL
        self.balances = Aggregate.from_etree(invbal)
    # Unsupported subaggregates
    for tag in ('INVOOLIST', 'INV401K', 'INV401KBAL', 'MKTGINFO'):
        child = invstmtrs.find(tag)
        if child is not None:
            # BUG FIX: the original ended with a bare ``invstmtrs.remove``
            # (an attribute access, never called), so unsupported children
            # were never actually removed.
            invstmtrs.remove(child)
def test_field_stats(client):
    """get_info_by_field should return exactly three buckets for the
    seeded 'cucumber' model, with a total of 1."""
    create_statistics()
    by_field = Aggregate.get_info_by_field("model", "cucumber", "model", "country")
    assert len(by_field.keys()) == 3
    assert by_field['total'] == 1
    # FIX: removed leftover debug ``print(by_field)`` — tests should not
    # write to stdout on the happy path.
    # Clean up fixtures so later tests start from empty collections.
    Statistic.objects().delete()
    Aggregate.objects().delete()
def on_get(self, req, resp):
    '''Handles get requests to /api/v1/stats'''
    # Collect the 90-day aggregates and serialize them as JSON.
    window = 90
    payload = {
        'model': Aggregate.get_most_popular('model', window),
        'country': Aggregate.get_most_popular("country", window),
        'total': Aggregate.get_count(window),
    }
    resp.body = json.dumps(payload)
def create_statistics():
    """Populate Statistic and Aggregate with a deterministic fixture set.

    One record is saved per (device, date, version, build-type)
    combination: 5 devices x 5 dates x 2 versions x 2 build types.
    """
    # FIX: dropped the unused ``stats = []`` local and replaced the
    # ``range(len(...))`` anti-pattern with enumerate().
    devices = ['cucumber', 'pumpkin', 'tomato', 'avocado', 'toast']
    dates = ['20170101', '20170102', '20170103', '20170104', '20170105']
    for i, device in enumerate(devices):
        for date in dates:
            for version in ['13.0', '14.1']:
                for build_type in ['NIGHTLY', 'UNOFFICIAL']:
                    build = '{}-{}-{}-{}'.format(version, date, build_type, device)
                    Statistic(d=str(i), m=device, v=build,
                              u='US', c='Carrier', c_id='0').save()
                    Aggregate.add_stat(d=str(i), m=device, v=build,
                                       u='US', c='Carrier', c_id='0')
def on_post(self, req, resp):
    '''Handles post requests to /api/v1/stats'''
    payload = req.media
    # Fold the reported device statistic into the aggregate collection.
    Aggregate.add_stat(d=payload['device_hash'],
                       m=payload['device_name'],
                       v=payload['device_version'],
                       u=payload['device_country'],
                       c=payload['device_carrier'],
                       c_id=payload['device_carrier_id'])
    resp.body = "neat"
    resp.content_type = "text/plain"
def test_get(client):
    """GET /api/v1/stats should return the current 90-day aggregates."""
    create_statistics()
    window = 90
    expected = {
        'model': Aggregate.get_most_popular('model', window),
        'country': Aggregate.get_most_popular('country', window),
        'total': Aggregate.get_count(window),
    }
    result = client.simulate_get('/api/v1/stats')
    assert result.status_code == 200
    assert result.json == expected
    # Reset collections for subsequent tests.
    Statistic.objects().delete()
    Aggregate.objects().delete()
def on_get(self, req, resp):
    '''Render the main page'''
    window = 90
    # Gather the headline aggregates shown on the landing page.
    stats = {
        "model": Aggregate.get_most_popular('model', window),
        "country": Aggregate.get_most_popular("country", window),
        "total": Aggregate.get_count(window),
    }
    page = load_template('index.html').render(
        stats=stats,
        columns=["model", "country"],
        date=datetime.utcnow().strftime("%Y-%m-%d %H:%M"))
    resp.content_type = 'text/html'
    resp.body = page
def get_location_aggregate(self, location: str, dataset_name: str) -> pd.DataFrame:
    """Return the summed feature samples of (at most) the first two
    aggregates recorded for *location* in *dataset_name*.

    Returns None when no aggregates are found.
    """
    aggregates = Aggregate.get_aggregate(dataset_name=dataset_name,
                                         location=location)
    total = None
    for leg in aggregates[:2]:
        samples = leg.get_samples(quantity_name=self.features)
        total = samples if total is None else total + samples
    return total
def __init__(self, tranlist):
    # Initialize with *TRANLIST Element.  The first two children are the
    # DTSTART/DTEND range; everything after that is a transaction.
    dtstart, dtend = tranlist[0], tranlist[1]
    transactions = tranlist[2:]
    self.dtstart = types.DateTime.convert(dtstart.text)
    self.dtend = types.DateTime.convert(dtend.text)
    self.extend(Aggregate.from_etree(tran) for tran in transactions)
def copyTRNRS(self, trnrs):
    """ Attach the data fields from the *TRNRS wrapper to the STMT """
    uid_text = trnrs.find('TRNUID').text
    self.uid = types.String(36).convert(uid_text)
    self.status = Aggregate.from_etree(trnrs.find('STATUS'))
    # CLTCOOKIE is optional; only copy it when present.
    cookie = trnrs.find('CLTCOOKIE')
    if cookie is not None:
        self.cookie = types.String(36).convert(cookie.text)
def on_get(self, req, resp, field, value):
    """Render per-field drill-down stats.

    NOTE: this endpoint is currently disabled — the early ``return``
    below answers 503 and leaves the rendering code unreachable until
    the feature is re-enabled.
    """
    resp.status = falcon.HTTP_503
    resp.body = "Individual stats are not currently available"
    resp.content_type = "text/html"
    return
    # --- dead code below: original handler, kept for re-enablement ---
    # FIX: ``if not field in ...keys()`` -> idiomatic ``field not in`` and
    # direct dict membership.
    if field not in Aggregate.field_map:
        resp.status = falcon.HTTP_404
        resp.content_type = "text/plain"
        resp.body = "Not Found"
        return
    valuemap = {
        'model': ['version', 'country'],
        'carrier': ['model', 'country'],
        'version': ['model', 'country'],
        'country': ['model', 'carrier']
    }
    stats = Aggregate.get_info_by_field(field, value,
                                        left=valuemap[field][0],
                                        right=valuemap[field][1])
    template = load_template('index.html').render(
        stats=stats,
        columns=valuemap[field],
        value=value,
        date=datetime.utcnow().strftime("%Y-%m-%d %H:%M"))
    resp.content_type = "text/html"
    resp.body = template
def test_aggregate(self):
    """get_aggregate filters by dataset_name and, optionally, location."""
    params = {'dataset_name': 'ukdale'}
    found = {agg.location for agg in Aggregate.get_aggregate(**params)}
    self.assertSetEqual(
        {'house_1', 'house_2', 'house_3', 'house_4', 'house_5'},
        found,
        "Didn't get correct aggregates when only dataset_name is specified"
    )
    # Narrow to a single known location.
    params['location'] = 'house_1'
    found = {agg.location for agg in Aggregate.get_aggregate(**params)}
    self.assertSetEqual(
        {'house_1'}, found,
        "Didn't correctly get aggregate when location is specified")
    # An unknown location yields nothing.
    params['location'] = 'house_10'
    self.assertEqual(
        0, len(Aggregate.get_aggregate(**params)),
        "Should have retrieved empty list")
def __init__(self, tree, strict=True): """ Initialize with ofx.ElementTree instance containing parsed OFX. The strict argument determines whether to throw an error for certain OFX data validation violations. """ # Keep a copy of the parse tree self.tree = tree # SONRS - server response to signon request sonrs = self.tree.find('SIGNONMSGSRSV1/SONRS') self.sonrs = Aggregate.from_etree(sonrs, strict=strict) # TRNRS - transaction response, which is the main section # containing account statements # # N.B. This iteration method doesn't preserve the original # ordering of the statements within the OFX response for stmtClass in (BankStatement, CreditCardStatement, InvestmentStatement): tagname = stmtClass._tagName for trnrs in self.tree.findall('*/%sTRNRS' % tagname): # *STMTTRNRS may have no *STMTRS (in case of error). # Don't blow up; skip silently. stmtrs = trnrs.find('%sRS' % tagname) if stmtrs is not None: stmt = stmtClass(stmtrs) # Staple the TRNRS wrapper data onto the STMT stmt.copyTRNRS(trnrs) self.statements.append(stmt) # SECLIST - list of description of securities referenced by # INVSTMT (investment account statement) seclist = self.tree.find('SECLISTMSGSRSV1/SECLIST') if seclist is None: return for sec in seclist: self.securities.append(Aggregate.from_etree(sec, strict=strict))
def _init(self, stmtrs):
    """Parse a *STMTRS aggregate into bank-statement attributes."""
    # BANKTRANLIST
    tranlist = stmtrs.find('BANKTRANLIST')
    if tranlist is not None:
        self.transactions = BANKTRANLIST(tranlist)
    # LEDGERBAL - mandatory
    self.ledgerbal = Aggregate.from_etree(stmtrs.find('LEDGERBAL'))
    # AVAILBAL
    availbal = stmtrs.find('AVAILBAL')
    if availbal is not None:
        self.availbal = Aggregate.from_etree(availbal)
    # BALLIST
    # BUG FIX: ElementTree Elements with no children are falsy, so the
    # original ``if ballist:`` silently skipped present-but-empty
    # elements; test ``is not None`` as the sibling parsers do.
    ballist = stmtrs.find('BALLIST')
    if ballist is not None:
        self.other_balances = [Aggregate.from_etree(bal) for bal in ballist]
    # Unsupported subaggregates
    for tag in ('MKTGINFO', ):
        child = stmtrs.find(tag)
        if child is not None:
            # BUG FIX: ``stmtrs.remove`` was a bare attribute access in
            # the original (never called); the child was never removed.
            stmtrs.remove(child)
def _get_aggregate_windows_without_activations(self, data_set_name, location, activations):
    """
    function to set aggregate_timeframes for caching information to
    retrieve examples without activations

    :param data_set_name: data_set_name of aggregate
    :param location: location where aggregate measurements were taken
    :param activations: target appliance activations for slicing
        aggregate window without activations
    :return: np array containing tuples of timeframes, dataset and
        location (None when no qualifying window exists)
    """
    # Load the first aggregate recorded for this dataset/location and
    # pull its feature samples.
    aggregate = Aggregate.get_aggregate(location=location, dataset_name=data_set_name)[0]
    aggregate_data = aggregate.get_samples(quantity_name=self.features)
    # Build the gaps between consecutive activations, including the
    # leading and trailing stretches of the aggregate series.
    gaps_between_activations = TimeFrameGroup()
    prev_end = aggregate_data.index[0]
    for activation in activations:
        gap = TimeFrame(prev_end, activation.index[0])
        gaps_between_activations.append(gap)
        prev_end = activation.index[-1]
    gap = TimeFrame(prev_end, aggregate_data.index[-1])
    gaps_between_activations.append(gap)
    good_sections = aggregate.get_good_sections(aggregate_data)
    print(len(good_sections))
    print(len(gaps_between_activations))
    print("Computing intersections")
    # Keep only activation-free stretches that are also good sections
    # and are at least one window long.
    intersection = gaps_between_activations.greedy_find_intersection(good_sections) \
        .remove_shorter_than(self.window_size)
    print("Iterating intersections")
    # Split each surviving stretch into window-sized slices, tagging
    # each slice with its dataset name and location.
    aggregate_timeframes = None
    for timeframe in intersection:
        splits = timeframe.split(self.window_size)
        split_arr = np.array(
            list(
                map(lambda split: (split, data_set_name, location), splits)
            )
        )
        if aggregate_timeframes is not None:
            aggregate_timeframes = np.concatenate(
                (aggregate_timeframes, split_arr)
            )
        else:
            aggregate_timeframes = split_arr
    return aggregate_timeframes
from app import app, db
from datetime import datetime
from models import Aggregate, Day, Hours
from bitcoin_api import buyHour, sellHour, buyDay, sellDay, buyMonth, sellMonth

# NOTE(review): ``date`` is never defined in this module -- only
# ``datetime`` is imported above, so these three lines raise NameError
# as written.  Presumably ``date = datetime.utcnow()`` (or a value from
# the calling context) is intended; confirm.
time_h = date.hour
time_d = date.day
time_m = date.month
# Look up the Aggregate row for the current month; create the whole
# month/day/hour hierarchy if it does not exist yet.
month = Aggregate.query.filter_by(month_number=time_m).first()
if month is None:
    # Create Month, Day, Hour
    month = Aggregate(month=date.strftime("%B"), month_number=time_m, average_buy=-1, average_sell=-1)
    day = Day(month=date.strftime("%B"), month_number=time_m, average_buy=-1, average_sell=-1, parent_month=month, day_number=time_d)
    hour = Hours(hour_number=time_h, buy_price=-1, sell_price=-1, hour=day)
    db.session.add(month)
    db.session.add(day)
    db.session.add(hour)
    db.session.commit()
# NOTE(review): ``hour`` is only bound inside the branch above; when the
# month already exists this call raises NameError -- confirm intent.
buyHour(hour)
def insert_aggregate(self, house, should_commit=True):
    """Create and persist an Aggregate row for *house*'s location.

    Commits immediately unless ``should_commit`` is False (useful when
    batching several inserts into one transaction).
    """
    record = Aggregate(location=house['location'],
                       origin_dataset_id=self.origin_dataset.id)
    self.session.add(record)
    if should_commit:
        self.session.commit()
    return record
def test_popular_stats(client):
    """All five seeded device models should be reported as popular."""
    create_statistics()
    top_models = Aggregate.get_most_popular('model', 90)
    assert len(top_models) == 5
    # Reset collections for subsequent tests.
    Statistic.objects().delete()
    Aggregate.objects().delete()
def creation(date, buy, sell):
    """Insert the Aggregate/Day/Hours hierarchy entry for one price quote.

    Parses *date* ('%m/%d/%y %H:%M'), creates whatever month/day/hour
    rows are missing, then recomputes day/month statistics and runs the
    predictors once enough prices have been collected.
    """
    date = strptime(date, '%m/%d/%y %H:%M')
    time_h = date.tm_hour
    time_d = date.tm_mday
    time_m = date.tm_mon
    time_y = date.tm_year
    complete_month = 0
    complete_day = 0
    monthDict = {1: 'January', 2: 'February', 3: 'March', 4: 'April',
                 5: 'May', 6: 'June', 7: 'July', 8: 'August',
                 9: 'September', 10: 'October', 11: 'November',
                 12: 'December'}
    month = Aggregate.query.filter_by(month_number=time_m, year=time_y).first()
    day = Day.query.filter_by(month_number=time_m, day_number=time_d).first()
    if month is None:
        # Create Month, Day, Hour
        month = Aggregate(year=time_y, month=monthDict[time_m],
                          month_number=time_m, average_buy=-1, average_sell=-1)
        day = Day(year=time_y, month=monthDict[time_m], month_number=time_m,
                  average_buy=-1, average_sell=-1, parent_month=month,
                  day_number=time_d)
        hour = Hours(hour_number=time_h, buy_price=buy, sell_price=sell,
                     belong_day=time_d, hour=day)
        db.session.add(month)
        db.session.add(day)
        db.session.add(hour)
        db.session.commit()
    else:
        day = Day.query.filter_by(day_number=time_d, month_id=month.id).first()
        if day is None:
            day = Day(year=time_y, month=monthDict[time_m],
                      month_number=time_m, average_buy=-1, average_sell=-1,
                      parent_month=month, day_number=time_d)
            hour = Hours(hour_number=time_h, buy_price=buy, sell_price=sell,
                         belong_day=time_d, hour=day)
            db.session.add(day)
            db.session.add(hour)
            db.session.commit()
        else:
            hour = Hours.query.filter_by(hour_number=time_h, day_id=day.id).first()
            if hour is None:
                hour = Hours(hour_number=time_h, buy_price=buy,
                             sell_price=sell, belong_day=time_d, hour=day)
                db.session.add(hour)
                db.session.commit()
    # Enough information to compute day statistics
    # BUG FIX: the original used ``is not -1``, which compares object
    # identity and only works by accident of CPython's small-int caching;
    # value comparison (``!= -1``) is the correct test.
    if day.priceDay() != -1:
        day.getStats()
        predict = Predictions(id=1, right=False, buy_prediction=False,
                              sell_prediction=False)
        # FIX: py2 print statement -> parenthesized form (identical
        # output on py2, valid on py3).
        print(predict.id)
        predict.predict()
    if month.priceMonth() != -1:
        month.getStats()
        Predictions(right=False, buy_prediction=False,
                    sell_prediction=False).predict()
def rpc_get_aggregate_metrics(self, project, typ="general/aggregate"):
    """Fetch one metric bucket from a project's aggregate document.

    *typ* is "<dimension>/<metric>": the dimension selects the aggregate
    document, the metric selects the entry returned from it.
    """
    dimension, metric = typ.split("/")
    aggregate = Aggregate.get_aggregate(
        project.ph_id,
        slashify(project.ph_id, "aggregate", dimension))
    return aggregate[metric]
def rpc_get_metric_tags(self, project):
    """Return the mapping of dimensions to their metrics for *project*.

    Returns an error payload when no project is supplied.
    """
    if project:
        return Aggregate.get_dimentions(project.ph_id)
    return {"status": "error", "message": "project doesnt exist"}
def creation(date, buy, sell):
    """Insert the Aggregate/Day/Hours hierarchy entry for one price quote.

    Parses *date* ('%m/%d/%y %H:%M') and creates whatever month/day/hour
    rows do not exist yet, triggering buy/sell recomputation for newly
    created months and days.
    """
    date = strptime(date, '%m/%d/%y %H:%M')
    print("%r" % date)
    time_h = date.tm_hour
    time_d = date.tm_mday
    time_m = date.tm_mon
    complete_month = 0
    complete_day = 0
    monthDict = {
        1: 'Jan', 2: 'Feb', 3: 'Mar', 4: 'Apr', 5: 'May', 6: 'Jun',
        7: 'Jul', 8: 'Aug', 9: 'Sep', 10: 'Oct', 11: 'Nov', 12: 'Dec'
    }
    month = Aggregate.query.filter_by(month_number=time_m).first()
    day = Day.query.filter_by(month_number=time_m).first()
    if month is None:
        # Create Month, Day, Hour
        month = Aggregate(month=monthDict[time_m], month_number=time_m,
                          average_buy=-1, average_sell=-1)
        day = Day(month=monthDict[time_m], month_number=time_m,
                  average_buy=-1, average_sell=-1, parent_month=month,
                  day_number=time_d)
        hour = Hours(hour_number=time_h, buy_price=buy, sell_price=sell,
                     hour=day)
        db.session.add(month)
        db.session.add(day)
        db.session.add(hour)
        db.session.commit()
        complete_month = buyMonth(month)
        sellMonth(month)
    else:
        day = Day.query.filter_by(day_number=time_d, month_id=month.id).first()
        if day is None:
            day = Day(month=monthDict[time_m], month_number=time_m,
                      average_buy=-1, average_sell=-1, parent_month=month,
                      day_number=time_d)
            # BUG FIX: the original passed ``day_id=day.id`` here, but the
            # new Day has not been flushed yet, so ``day.id`` is still
            # None and the Hours row ended up orphaned.  Link through the
            # relationship, as the month-creation branch above does.
            hour = Hours(hour_number=time_h, buy_price=buy, sell_price=sell,
                         hour=day)
            db.session.add(day)
            db.session.add(hour)
            db.session.commit()
            complete_day = buyDay(day)
            sellDay(day)
        else:
            hour = Hours.query.filter_by(hour_number=time_h, day_id=day.id).first()
            if hour is None:
                # Here ``day`` was loaded from the database, so its id is
                # valid and the raw foreign key is safe to use.
                hour = Hours(hour_number=time_h, buy_price=buy,
                             sell_price=sell, day_id=day.id)
                db.session.add(hour)
                db.session.commit()
def test_post(client):
    """POST /api/v1/stats should persist exactly one Statistic."""
    payload = {
        'device_hash': '1',
        'device_name': 'cucumber',
        'device_version': '14.1-20170101-NIGHTLY-cucumber',
        'device_country': 'US',
        'device_carrier': 'Carrier',
        'device_carrier_id': '0',
    }
    result = client.simulate_post('/api/v1/stats', body=json.dumps(payload))
    assert result.status_code == 200
    assert Statistic.objects().count() == 1
    # Reset collections for subsequent tests.
    Statistic.objects().delete()
    Aggregate.objects().delete()
def __init__(self, stmtrs):
    """ Initialize with *STMTRS Element """
    # Shared header fields first; subclass-specific parsing is delegated
    # to _init().
    self.currency = stmtrs.find('CURDEF').text
    acct_elem = stmtrs.find(self._acctTag)
    self.account = Aggregate.from_etree(acct_elem)
    self._init(stmtrs)