def main():
    args = setupArgs()
    formatOptions = FormatOptions(args.sum, args.empty)
    ledger = csvIo.read(args.journal)
    setDefaultArgs(args, ledger)
    # Create backup ledger
    backupLedger(args)
    if args.read is not None:
        readIn.read(ledger, args)
        csvIo.write(ledger, args.journal)
    if args.cash:
        inputHandler.addManualTransaction(ledger)
        csvIo.write(ledger, args.journal)
    if args.budget is not None:
        queryInput = QueryInput(
            args.balance, Timeframe(args.start, args.end), args.period, args.exact
        )
        budget.compareToBudget(ledger, args.budget, queryInput, formatOptions)
    if args.balance is not None:
        queryInput = QueryInput(
            args.balance, Timeframe(args.start, args.end), args.period, args.exact
        )
        if args.average:
            ledger.printAverages(queryInput, formatOptions)
        else:
            ledger.printAccounts(queryInput, formatOptions)
    if args.register is not None:
        queryInput = QueryInput(
            args.register, Timeframe(args.start, args.end), args.period, args.exact
        )
        ledger.printTransactions(queryInput, formatOptions)
    if args.plot is not None:
        if "all" in args.plot:
            args.plot = sorted(list(plots.plots.keys()))
        plots.doPlots(ledger, args)

def get_FDP_distances():
    json_data = get_json_data()
    if not json_data:
        return make_response(jsonify({"error": "Invalid JSON data!"}), 401)
    timeframe = Timeframe(json_data['starttime'], json_data['endtime'])
    df = DB.full.loc[:, json_data['intersections']]
    df = timeframe.trim(df)
    df = df.sum(axis=1, level=0, skipna=True)
    df = intersection_distances(df)
    df = df.where(pd.notnull(df), None)
    return jsonify({"columns": df.columns.values.tolist(),
                    "matrix": df.values.tolist()})

def get_markers():
    json_data = get_json_data()
    timeframe = Timeframe(json_data['starttime'], json_data['endtime'])
    df = timeframe.trim(DB.full)
    d_sum = df.groupby(axis=1, level=0).apply(np.nansum)
    df = timeframe.trim(DB.dist_sd)
    col_count = df.columns.get_level_values(0).value_counts()
    abs_above = (df > 3).groupby(axis=1, level=0).sum()
    pct_above = (abs_above.sum(axis=0) / (df.shape[0] * col_count)).round(decimals=2)
    abs_below = (df < -3).groupby(axis=1, level=0).sum()
    pct_below = ((abs_below / col_count).apply(np.nansum) / df.shape[0]).round(decimals=2)
    return jsonify({"total_passings": d_sum.to_dict(),
                    "pct_above": pct_above.to_dict(),
                    "pct_below": pct_below.to_dict(),
                    "measurements": timeframe.indices})

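# The two Flask endpoints above read their parameters from a JSON body via
# get_json_data(). A hypothetical client call, as a sketch only: the route
# path, host/port, and timestamp format are assumptions, and the payload
# keys simply mirror the lookups in the handlers.
import requests

payload = {
    "starttime": "2020-01-01 00:00",    # timestamp format assumed, not pinned down by the snippets
    "endtime": "2020-01-31 23:59",
    "intersections": ["K128", "K402"],  # placeholder intersection ids
}
resp = requests.post("http://localhost:5000/get_FDP_distances", json=payload)
print(resp.json()["matrix"])
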
def query_frequency(request):
    query = request.GET.get("query", None)
    response_data = {}
    sample = 500
    if query is not None:
        # Get the Timeframe (query window) from the request parameters
        request_timeframe = Timeframe(start=request.GET.get("start", None),
                                      end=request.GET.get("end", None),
                                      interval=request.GET.get("interval", "hour"))
        data = None
        try:
            # Query GNIP and get frequency
            data = Frequency(query=query,
                             sample=sample,
                             start=request_timeframe.start,
                             end=request_timeframe.end)
        except GNIPQueryError as e:
            return handleQueryError(e)
        response_data["frequency"] = data.freq
        response_data["sample"] = sample
    return HttpResponse(json.dumps(response_data), content_type="application/json")

def query_chart(request):
    """ Returns query chart for given request """
    # TODO: Move this to one line e.g. queries to query
    query = request.GET.get("query", None)
    queries = request.GET.getlist("queries[]")
    if query:
        queries = [query]
    request_timeframe = Timeframe(start=request.GET.get("start", None),
                                  end=request.GET.get("end", None),
                                  interval=request.GET.get("interval", "hour"))
    response_chart = None
    try:
        response_chart = Chart(queries=queries,
                               start=request_timeframe.start,
                               end=request_timeframe.end,
                               interval=request_timeframe.interval)
    except GNIPQueryError as e:
        return handleQueryError(e)
    response_data = {}
    response_data['days'] = request_timeframe.days
    response_data['start'] = request_timeframe.start.strftime(DATE_FORMAT_JSON)
    response_data['end'] = request_timeframe.end.strftime(DATE_FORMAT_JSON)
    response_data['columns'] = response_chart.columns
    response_data['total'] = response_chart.total
    return HttpResponse(json.dumps(response_data), content_type="application/json")

def setDefaultArgs(args, ledger):
    """Set some default args that can only be determined once the ledger file
    has been read"""
    if args.start is None:
        args.start = ledger.getFirstTransactionDate()
    if args.end is None:
        args.end = ledger.getLastTransactionDate()
    args.timeframe = Timeframe(args.start, args.end)

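# The ledger helpers treat Timeframe as a plain start/end pair of dates.
# A minimal sketch of that assumption (hypothetical; the real class in this
# codebase may carry more behaviour):
from dataclasses import dataclass
from datetime import date

@dataclass
class Timeframe:
    start: date
    end: date

    def contains(self, day: date) -> bool:
        # Inclusive on both ends, matching how subdivideTime iterates below.
        return self.start <= day <= self.end
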
def request_timeframe(self, request):
    """ Returns a timeframe to use in the API query """
    gnip_logger.info("Received request for timeline")
    request_timeframe = Timeframe(start=request.GET.get("start", None),
                                  end=request.GET.get("end", None),
                                  interval=request.GET.get("interval", "hour"))
    return request_timeframe

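# The Django views use a different Timeframe: keyword start/end strings plus
# an interval, later read back as datetimes with a .days attribute (see
# query_chart). A rough sketch of that contract, named GnipTimeframe here to
# avoid clashing with the sketch above; the parsing format and the fallback
# window are assumptions, not the real defaults:
from datetime import datetime, timedelta

class GnipTimeframe:
    def __init__(self, start=None, end=None, interval="hour"):
        # Fall back to "now" / a 30-day lookback when no bounds are given
        # (assumed defaults; the real class may choose differently).
        self.end = datetime.utcnow() if end is None else datetime.strptime(end, "%Y-%m-%d")
        self.start = ((self.end - timedelta(days=30)) if start is None
                      else datetime.strptime(start, "%Y-%m-%d"))
        self.interval = interval
        self.days = (self.end - self.start).days
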
def query_tweets(request):
    """ Returns tweet query """
    request_timeframe = Timeframe(start=request.GET.get("start", None),
                                  end=request.GET.get("end", None),
                                  interval=request.GET.get("interval", "hour"))
    query_count = int(request.GET.get("embedCount", TWEET_QUERY_COUNT))
    export = request.GET.get("export", None)
    query = request.GET.get("query", "")
    try:
        tweets = Tweets(query=query,
                        query_count=query_count,
                        start=request_timeframe.start,
                        end=request_timeframe.end,
                        export=export)
    except GNIPQueryError as e:
        return handleQueryError(e)
    response_data = {}
    if export == "csv":
        response = HttpResponse(content_type='text/csv')
        response['Content-Disposition'] = 'attachment; filename="export.csv"'
        writer = csv.writer(response, delimiter=',', quotechar="'",
                            quoting=csv.QUOTE_ALL)
        writer.writerow(['count', 'time', 'id', 'user_screen_name', 'user_id',
                         'status', 'retweet_count', 'favorite_count',
                         'is_retweet', 'in_reply_to_tweet_id',
                         'in_reply_to_screen_name'])
        count = 0
        for t in tweets.get_data():
            count = count + 1
            body = t['body'].encode('ascii', 'replace')
            # GNIP activity ids are URNs; keep only the part after the last colon
            status_id = t['id']
            status_id = status_id[status_id.rfind(':') + 1:]
            user_id = t['actor']['id']
            user_id = user_id[user_id.rfind(':') + 1:]
            writer.writerow([count, t['postedTime'], status_id,
                             t['actor']['preferredUsername'], user_id, body,
                             t['retweetCount'], t['favoritesCount'],
                             'X', 'X', 'X'])
        return response
    else:
        response_data['tweets'] = tweets.get_data()
        return HttpResponse(json.dumps(response_data), content_type="application/json")

def plotNetworth(ledger, timeframe, smooth=False):
    start = ledger.getFirstTransactionDate()
    plotAccounts(
        ledger,
        [["assets", "liabilities"], ["assets"], ["liabilities"]],
        Timeframe(start, timeframe.end),
        "Month",
        "Amount [€]",
        ["Net worth", "Assets", "Liabilities"],
        smooth=smooth,
        invert=[False, False, True],
        totals=True,
    )

def get_data():
    data = dict()
    data['pathData'] = dict()
    data['maxVal'] = 0
    json_data = get_json_data()
    if not json_data:
        return make_response(jsonify({"error": "Invalid JSON data!"}), 401)
    timeframe = Timeframe(json_data['starttime'], json_data['endtime'])
    aggregated = False
    if 'aggregated' in json_data['graph_options']:
        json_data['graph_options'].remove('aggregated')
        aggregated = True

    def prep_for_jsonify(df, key):
        if df.empty:
            data['pathData'][key] = dict()
        else:
            if aggregated:
                df = df.sum(axis=1, level=0, skipna=True)
                if json_data['bin_size'] > 1:
                    df = bin_df(df, json_data['bin_size'])
                data['maxVal'] = max(data['maxVal'], df.max().max())
                df = df.where(pd.notnull(df), None)
                data['pathData'][key] = df.to_dict(orient='list')
            else:
                data['maxVal'] = max(data['maxVal'], df.max(skipna=True).max())
                if json_data['bin_size'] > 1:
                    df = bin_df(df, json_data['bin_size'])
                # Replace NaN with None so that we get proper null values in the
                # JSON once we jsonify the df.
                df = df.where(pd.notnull(df), None)
                # Build a dictionary from the multicolumn df
                data['pathData'][key] = {k[0] + ' ' + k[1]: v
                                         for k, v in df.to_dict(orient='list').items()}

    for graph_option in json_data['graph_options']:
        if graph_option == 'mean':
            df = DB.mean.loc[:, json_data['intersections']]
            df = timeframe.trim(df)
            prep_for_jsonify(df, graph_option)
        elif graph_option == 'median':
            df = DB.median.loc[:, json_data['intersections']]
            df = timeframe.trim(df)
            prep_for_jsonify(df, graph_option)
    df = DB.full.loc[:, json_data['intersections']]
    df = timeframe.trim(df)
    prep_for_jsonify(df, 'aggregated')
    data['dates'] = timeframe.get_dates()[::json_data['bin_size']]
    data['coordinates'] = DB.coordinates.to_dict('index')
    if pd.isna(data['maxVal']):
        data['maxVal'] = 0
    return jsonify(**data)

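# bin_df is used above but defined elsewhere. Given that get_data thins the
# date axis with [::bin_size], a plausible sketch is summing each run of
# bin_size consecutive rows; treat this as a guess at the real helper:
import numpy as np
import pandas as pd

def bin_df(df: pd.DataFrame, bin_size: int) -> pd.DataFrame:
    # Assign row i to bin i // bin_size and sum each bin.
    bins = np.arange(len(df)) // bin_size
    # min_count=1 keeps a bin NaN when all of its measurements are missing.
    return df.groupby(bins).sum(min_count=1)
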
def subdivideTime(timeframe: Timeframe, period: Period) -> List[Timeframe]:
    """Subdivide the given timeframe into periods of length period and return
    a list of Timeframes covering the start and end of each period"""
    if period.isInfinite():
        return [timeframe]
    delta = period.delta
    periods = []
    today = timeframe.start
    oneDay = timedelta(1)
    while today <= timeframe.end:
        afterOnePeriod = today + delta
        # Shift the last date back one day so we don't have overlapping dates.
        periods.append(Timeframe(today, afterOnePeriod - oneDay))
        today = afterOnePeriod
    return periods

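# A quick worked example of subdivideTime, with a stub standing in for the
# real Period class (only .delta and .isInfinite() are assumed by the function):
from datetime import date, timedelta

class StubPeriod:
    delta = timedelta(days=7)

    def isInfinite(self):
        return False

weeks = subdivideTime(Timeframe(date(2021, 1, 1), date(2021, 1, 21)), StubPeriod())
# -> [Timeframe(2021-01-01, 2021-01-07),
#     Timeframe(2021-01-08, 2021-01-14),
#     Timeframe(2021-01-15, 2021-01-21)]
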
def query_frequency(request):
    query = request.GET.get("query", None)
    response_data = {}
    sample = 500
    if query is not None:
        # Get the Timeframe (query window) from the request parameters
        request_timeframe = Timeframe(start=request.GET.get("start", None),
                                      end=request.GET.get("end", None),
                                      interval=request.GET.get("interval", "hour"))
        # Query GNIP and get frequency
        data = Frequency(query=query,
                         sample=sample,
                         start=request_timeframe.start,
                         end=request_timeframe.end)
        response_data["frequency"] = data.freq
        response_data["sample"] = sample
    response = HttpResponse(json.dumps(response_data),
                            content_type="application/json")
    response['Cache-Control'] = 'max-age=%d' % MAX_AGE
    return response