def getRealtimePrice():
    """Return a realtime ASX market snapshot for the requested symbols.

    Reads a comma-separated ``symbol`` query parameter, caps the list at
    MAX_SYMBOLS, fetches each quote from the public ASX share endpoint and
    returns the combined data as a JSON object keyed by symbol code.
    """
    print("There is a request for realtime market snapshot.")
    print(request.args)
    print(get_ipaddr())
    my_ip_set.add(get_ipaddr())

    requested = request.args['symbol'].split(',')
    n_symbols = min(len(requested), MAX_SYMBOLS)
    print(n_symbols)
    requested = requested[:n_symbols]
    print(requested)

    # Snapshot fields kept from the ASX response, in output order.
    fields = ['code', 'bid_price', 'offer_price', 'open_price', 'last_price',
              'change_in_percent', 'change_price', 'day_high_price',
              'day_low_price', 'average_daily_volume', 'volume',
              'previous_close_price', 'previous_day_percentage_change',
              'eps', 'pe', 'annual_dividend_yield', 'market_cap',
              'number_of_shares', 'year_change_in_percentage',
              'year_change_price', 'year_high_date', 'year_high_price',
              'year_low_date', 'year_low_price', 'year_open_date',
              'year_open_price']

    frames = []
    for code in requested:
        url = "https://www.asx.com.au/asx/1/share/%s" % code
        print(url)
        quote = pd.read_json(url, orient='columns', typ='series')[fields]
        # One-row frame per symbol so they can be concatenated below.
        frames.append(pd.DataFrame(quote).transpose())

    result = pd.concat(frames)
    result.set_index(['code'], inplace=True)
    print(result)
    return result.to_json(orient='index'), 200
def handle_429(e):
    """Renders full error page for too many site queries"""
    html = render.html("429")
    source = get_ipaddr()
    # Track rate-limit hits per (error, client) for monitoring.
    count_ratelimit.labels(e, source).inc()
    logger.error(f"Error: {e}, Source: {source}")
    return html, 429
def getRevenue():
    """Return income-statement figures for one symbol, filtered to one year.

    Expects ``symbol`` and ``year`` query parameters; responds with JSON
    keyed by ISO date.
    """
    print("There is a request for revenue in a year.")
    my_ip_set.add(get_ipaddr())
    symbol = request.args['symbol']
    year = request.args['year']

    income = td.getRevenue(symbol)
    # Index values end with the 4-digit year; keep only the requested one.
    income = income[income.index.str[-4:] == year]
    income.rename(columns={"Total Revenue": "totalrevenue",
                           "Cost of Revenue": "costofrevenue",
                           "Gross Profit": "grossprofit",
                           "Total Operating Expenses": "totalopexpense",
                           "Earnings Before Interest and Taxes": "EBIT",
                           "Income Before Tax": "incomebeforetax",
                           "Net Income Applicable To Common Shares": "netincome"},
                  inplace=True)
    income = income[["totalrevenue", "costofrevenue", "grossprofit",
                     "totalopexpense", "EBIT", "incomebeforetax", "netincome"]]
    # Normalise the index to ISO date strings for the JSON payload.
    income.index = pd.to_datetime(income.index)
    income.index = income.index.strftime('%Y-%m-%d')
    return income.to_json(orient='index'), 200
def handle_500(e):
    """General Error Page"""
    source = get_ipaddr()
    # Record the failure in the error counter (query labels are unknown here).
    count_errors.labels(500, e, source, None, None, None).inc()
    logger.error(f"Error: {e}, Source: {source}")
    return render.html("500"), 500
def getHistroyPrice():
    """Return historical prices for a symbol over a date range as JSON.

    Query parameters: ``symbol``, ``frequency`` ('weekly' or 'daily'),
    ``datefrom`` and ``dateto``. Any other frequency falls through to the
    plain-text 'false request' response (NOTE(review): that path returns
    HTTP 200 — consider 400, but existing clients may depend on it).
    """
    print("There is a request for histroical price within a range.")
    print(request.args)
    my_ip_set.add(get_ipaddr())
    symbol = request.args['symbol']
    frequency = request.args['frequency']
    datefrom = request.args['datefrom']
    dateto = request.args['dateto']

    if frequency == 'weekly':
        df = td.getHisWeeklyPrice(symbol, datefrom, dateto).set_index('Date')
        df.rename(columns={"Close*": "Close", "Adj. close**": "Adjust Close"},
                  inplace=True)
        df.index = df.index.strftime('%Y-%m-%d')
        return (df.to_json(orient='index', date_format='iso', date_unit='s')), 200

    if frequency == 'daily':
        df = td.getHisDailyPrice(symbol, datefrom, dateto).set_index('Date')
        df.rename(columns={"Close*": "Close", "Adj. close**": "Adjust Close"},
                  inplace=True)
        # Scraped pages embed "Dividend" rows in the Open column; drop them.
        # Downloaded files have a numeric Open column, where the .str accessor
        # raises AttributeError (KeyError if Open is absent entirely) — a bare
        # except here previously hid every other failure too.
        try:
            df = df[df.Open.str.contains("Dividend") == False]
        except (AttributeError, KeyError):
            print('this is not downloaded file')
        df.index = df.index.strftime('%Y-%m-%d')
        return (df.to_json(orient='index', date_format='iso', date_unit='s')), 200

    return 'false request'
def handle_404(e):
    """Renders full error page for invalid URI"""
    html = render.html("404")
    requested_path = request.path
    source = get_ipaddr()
    # Count not-found hits per (error, path, client) for monitoring.
    count_notfound.labels(e, requested_path, source).inc()
    logger.error(f"Error: {e}, Path: {requested_path}, Source: {source}")
    return html, 404
def get_remote_ip(cls):
    """Return the client IP address, or loopback outside a request context."""
    # Typical proxied request environ entries:
    # 'HTTP_X_REAL_IP': '10.255.0.3',
    # 'HTTP_X_FORWARDED_FOR': '114.198.145.117, 10.255.0.3',
    # 'HTTP_X_FORWARDED_PROTO': 'http',
    # 'HTTP_X_FORWARDED_PORT': '443',
    if has_request_context():
        return get_ipaddr()
    return '127.0.0.1'
def ratekeyfunc():
    """Build the rate-limiter key: client address plus encoded view args."""
    addr = get_ipaddr()
    params = {argname: str(request.view_args[argname]) for argname in arglist}
    # we need to encode the url properly instead of just doing &name=value...
    # so some one can't insert stuff to make the request unique
    return addr + urlencode(params)
def save_client_log():
    """Append the posted JSON client log, tagged with the caller's IP,
    to /tmp/covid-client-map.log.

    Best-effort: any failure is printed and swallowed so the endpoint
    always answers ``{"ok": 200}``.
    """
    try:
        log = request.json
        log["remote_addr"] = get_ipaddr()
        # The with-statement closes the file; the original also called
        # f.close() inside the block, which was redundant.
        with open("/tmp/covid-client-map.log", "a") as f:
            f.write(json.dumps(log) + "\n")
    except Exception as e:
        # Deliberately broad: logging must never break the response.
        print(e)
    return jsonify({"ok": 200})
def DownloadFile ():
    """Send the cached daily-price file for the requested symbol.

    Responds 404 when the file does not exist, 500 when sending fails.
    """
    my_ip_set.add(get_ipaddr())
    symbol = request.args['symbol']
    file_name = ct.DAILY_PRICE_FILE % symbol
    print(file_name)
    print('try to download')
    if os.path.isfile(file_name):
        try:
            return send_file(file_name, as_attachment=True), 200
        except Exception:
            # Narrowed from a bare except: a bare clause would also trap
            # KeyboardInterrupt/SystemExit and convert them to a 500.
            abort(500)
    else:
        abort(404)
def get_ipaddr_or_eppn() -> str:
    """
    Uses eppn if supplied else remote address for rate limiting
    """
    log = current_app.logger
    log.debug('REQUEST ENVIRONMENT:')
    log.debug(request.environ)
    identifier = request.environ.get('HTTP_EPPN', None)
    log.debug(f'Identifier from request environment: {identifier}')
    if identifier:
        return identifier
    # No eppn supplied — fall back to the client address.
    log.warning('HTTP_EPPN is missing from request environment')
    identifier = get_ipaddr()
    log.debug(f'Identifier from get_ipaddr: {identifier}')
    return identifier
def getBalanceSheet():
    """Return balance-sheet figures for one symbol, filtered to one year.

    Expects ``symbol`` and ``year`` query parameters; responds with JSON
    keyed by ISO date.
    """
    print("There is a request for balance sheet in a year.")
    my_ip_set.add(get_ipaddr())
    symbol = request.args['symbol']
    year = request.args['year']

    sheet = td.getBalanceSheet(symbol)
    # Index values end with the 4-digit year; keep only the requested one.
    sheet = sheet[sheet.index.str[-4:] == year]
    sheet.rename(columns={"Total assets": "totalasset",
                          "Total liabilities": "totaldebt",
                          "Net receivables": "receivable",
                          "Accounts payable": "payable",
                          "Total current assets": "currentassets",
                          "Total current liabilities": "currentdebts",
                          "Cash and cash equivalents": "totalcash",
                          "Net tangible assets": "nettangibleassets"},
                 inplace=True)
    sheet = sheet[["totalasset", "totaldebt", "receivable", "payable",
                   "currentassets", "currentdebts", "totalcash",
                   "nettangibleassets"]]
    # Normalise the index to ISO date strings for the JSON payload.
    sheet.index = pd.to_datetime(sheet.index)
    sheet.index = sheet.index.strftime('%Y-%m-%d')
    return sheet.to_json(orient='index'), 200
def getCashflow():
    """Return cash-flow figures for one symbol, filtered to one year.

    Expects ``symbol`` and ``year`` query parameters; responds with JSON
    keyed by ISO date.
    """
    print("There is a request for cash flow in a year.")
    my_ip_set.add(get_ipaddr())
    symbol = request.args['symbol']
    year = request.args['year']

    cashflow = td.getCashflow(symbol)
    # Index values end with the 4-digit year; keep only the requested one.
    cashflow = cashflow[cashflow.index.str[-4:] == year]
    cashflow.rename(
        columns={"Total cash flow from operating activities": "cashfromoperating",
                 "Capital expenditure": "capitalexpenditure",
                 "Total cash flow from investment activities": "cashfrominvestment",
                 "Total cash flow from financing activities": "cashfromfinancing",
                 "Change in cash and cash equivalents": "changeincash"},
        inplace=True)
    cashflow = cashflow[["cashfromoperating", "capitalexpenditure",
                         "cashfrominvestment", "cashfromfinancing",
                         "changeincash"]]
    # Normalise the index to ISO date strings for the JSON payload.
    cashflow.index = pd.to_datetime(cashflow.index)
    cashflow.index = cashflow.index.strftime('%Y-%m-%d')
    return cashflow.to_json(orient='index'), 200
def getValuation():
    """Return valuation statistics for the requested symbol as JSON."""
    print("There is a request for valuation of certain symbol.")
    my_ip_set.add(get_ipaddr())
    symbol = request.args['symbol']

    stats = td.getStatistics(symbol)
    stats.rename(columns={"Market cap (intra-day) 5": "marketcap",
                          "Enterprise value 3": "enterprisevalue",
                          "Trailing P/E": "PE",
                          "Forward P/E 1": "anticipatedPE",
                          "PEG ratio (5-yr expected) 1": "PEG",
                          "Price/sales (ttm)": "PS",
                          "Price/book (mrq)": "PB",
                          "Enterprise value/revenue 3": "enterprisevalue2revenue",
                          "Enterprise value/EBITDA 6": "enterprisevalue2EBITDA"},
                 inplace=True)
    # Key the JSON payload by the requested symbol.
    stats['symbol'] = symbol
    stats.set_index(['symbol'], inplace=True)
    print(stats)
    return stats.to_json(orient='index'), 200
def handle_config():
    """Report the caller's resolved IP address as JSON."""
    addr = get_ipaddr()
    return jsonify({'ip_addr': addr})
def task_upload():
    """Accept a PDF upload (or the sha256 of one already on the server),
    validate the form options and enqueue a ``generate_pdf`` Celery task.

    Responds 202 with the task id (JSON or the job.html page), or 400 with
    an Italian error message on invalid input. Fixes over the original:
    ``is None`` / ``is not None`` instead of ``== None`` / ``!= None``, and
    the duplicated KeyError/ValueError fallbacks collapsed into one clause.
    """
    form_file = "file"
    form_sha256 = "sha256"
    form_filename = "filename"
    form_trim = "trim"
    form_percentage = "percentage"
    form_npage = "npage"
    form_layout = "layout"
    form_trimlayout = "trimlayout"
    form_hidelogo = "hidelogo"
    form_privacy = "privacy"

    # Privacy consent is mandatory.
    if request.form.get(form_privacy) is None:
        abort(
            400,
            "Senza aver accettato l'informativa sulla privacy non è possibile proseguire"
        )

    # Either a file or the sha256 of a previously uploaded file is required.
    if (form_file not in request.files or request.files[form_file].filename == "") \
            and form_sha256 not in request.form:
        abort(
            400,
            "Manca sia il file sia il suo sha256. Inviare almeno uno dei due")
    if (form_file not in request.files or request.files[form_file].filename == "") \
            and form_sha256 in request.form \
            and not has_already_been_uploaded(request.form[form_sha256]):
        abort(400, "Questo sha256 non corrisponde a nessun file sul server")

    if form_file in request.files and request.files[form_file].filename != "":
        file = request.files[form_file]
        if file and is_pdf(file):
            # Store under the content hash so re-uploads deduplicate.
            filename = sha256(file)
            file.save(
                os.path.join(current_app.config["UPLOAD_FOLDER"], filename))
            client_filename = os.path.basename(
                secure_filename(request.files[form_file].filename))
        else:
            abort(400, "PDF è il solo formato accettato")
    else:
        filename = request.form[form_sha256]
        client_filename = os.path.basename(
            secure_filename(request.form[form_filename]))

    trim = (request.form.get(form_trim) is not None)

    # Pages per sheet: only 1, 2 or 3 are accepted; default 3.
    try:
        npage = int(request.form[form_npage])
        if npage != 1 and npage != 2 and npage != 3:
            raise ValueError
    except (KeyError, ValueError):
        npage = 3

    # Trim percentage: 35–55 accepted; default 50; stored as a fraction.
    try:
        percentage = int(request.form[form_percentage])
        if percentage < 35 or percentage > 55:
            raise ValueError
    except (KeyError, ValueError):
        percentage = 50
    percentage = float(percentage) / 100

    # Source layout: 1–6 except 5; default 1.
    try:
        layout = abs(int(request.form[form_layout]))
        if layout < 1 or layout > 6 or layout == 5:
            raise ValueError
    except (KeyError, ValueError):
        layout = 1

    trimlayout = (request.form.get(form_trimlayout) is not None)
    # Layout 1 never needs layout-trimming; layouts 3 and 6 always do.
    if layout == 1:
        trimlayout = False
    if layout == 3 or layout == 6:
        trimlayout = True
    # Layout-trimming supersedes plain trimming.
    if trimlayout:
        trim = False

    hidelogo = (request.form.get(form_hidelogo) is not None)

    original_layout = {"slides": layout, "trim": trimlayout}
    options = {
        "trim": trim,
        "npage": npage,
        "percentage": percentage,
        "showlogo": not hidelogo
    }
    task = generate_pdf.delay(filename=filename,
                              client_filename=client_filename,
                              original_layout=original_layout,
                              options=options,
                              client_ua=request.headers.get('User-Agent'),
                              client_ip=get_ipaddr())
    if client_wants_json():
        return jsonify_success({"task_id": task.id}), 202
    return render_template("job.html",
                           task_id=task.id,
                           status="PENDING",
                           progress=0), 202
def hyperglass_main():
    """Main backend application initiator. Ingests Ajax POST data from form
    submit, passes it to the backend application to perform the
    filtering/lookups"""
    # Get JSON data from Ajax POST
    lg_data = request.get_json()
    logger.debug(f"Unvalidated input: {lg_data}")
    # Return error if no target is specified
    if not lg_data["target"]:
        logger.debug("No input specified")
        return Response(config["messages"]["no_input"], codes["danger"])
    # Return error if no location is selected
    if lg_data["location"] not in configuration.hostnames():
        logger.debug("No selection specified")
        return Response(config["messages"]["no_location"], codes["danger"])
    # Return error if no query type is selected
    if lg_data["type"] not in [
            "bgp_route",
            "bgp_community",
            "bgp_aspath",
            "ping",
            "traceroute",
    ]:
        logger.debug("No query specified")
        return Response(config["messages"]["no_query_type"], codes["danger"])
    client_addr = get_ipaddr()
    count_data.labels(client_addr, lg_data["type"], lg_data["location"],
                      lg_data["target"]).inc()
    logger.debug(f"Client Address: {client_addr}")
    # Stringify the form response containing serialized JSON for the request,
    # use as key for k/v cache store so each command output value is unique
    cache_key = str(lg_data)
    # Define cache entry expiry time
    cache_timeout = config["features"]["cache"]["timeout"]
    logger.debug(f"Cache Timeout: {cache_timeout}")
    # Check if cached entry exists
    if not r_cache.hgetall(cache_key):
        try:
            logger.debug(f"Sending query {cache_key} to execute module...")
            cache_value = execute.Execute(lg_data).response()
            value_output = cache_value["output"]
            value_code = cache_value["status"]
            logger.debug(
                f"Validated response...\nStatus Code: {value_code}, Output: {value_output}"
            )
            # If it doesn't, create a cache entry
            r_cache.hmset(cache_key, cache_value)
            r_cache.expire(cache_key, cache_timeout)
            logger.debug(f"Added cache entry for query: {cache_key}")
            # If 200, return output
            response = r_cache.hgetall(cache_key)
            if value_code == 200:
                logger.debug(f"Returning {value_code} response")
                return Response(response["output"], response["status"])
            # If 400 error, return error message and code
            if value_code in [405, 415]:
                count_errors.labels(
                    response["status"],
                    codes_reason[response["status"]],
                    client_addr,
                    lg_data["type"],
                    lg_data["location"],
                    lg_data["target"],
                ).inc()
                logger.debug(f"Returning {value_code} response")
                return Response(response["output"], response["status"])
        except Exception:
            # Narrowed from a bare except: it still re-raised, but a bare
            # clause also routed KeyboardInterrupt/SystemExit through the
            # error log below.
            logger.error(f"Unable to add output to cache: {cache_key}")
            raise
    # If it does, return the cached entry
    else:
        logger.debug(f"Cache match for: {cache_key}, returning cached entry")
        response = r_cache.hgetall(cache_key)
        return Response(response["output"], response["status"])
def _limiter_key():
    """Rate-limiter key: the user id when logged in, the client IP otherwise."""
    if not current_user.is_authenticated:
        return "ip_{}".format(get_ipaddr())
    return "u_{}".format(current_user.id)