def add_villager():
    """Add a new villager record to the database.

    Reads the villager fields from the request JSON body and inserts them
    into ``db3_handler``.  Aborts with 400 on a malformed/missing body.
    Returns 1 on successful insert, 0 on DB failure (original contract kept).
    """
    try:
        # Was `requests.get_json()` — the `requests` library has no get_json();
        # the Flask request object must be used.  Parse the body once instead
        # of once per field.
        payload = request.get_json()
        print(payload)
        for field in ("villager_name", "villager_identification",
                      "village_name", "village_contact_phone",
                      "village_contact_landline", "villager_age",
                      "villager_occupation_status"):
            villager_db_data[field] = payload[field]
    except Exception:
        # Missing field or non-JSON body -> bad request.
        abort(400)
    print("coming here")
    # Insert the collected data directly into the villagers collection.
    try:
        db3_handler.insert_one(villager_db_data)
        return 1
    except Exception:
        return 0
def get_trending_stocks():
    """Return trending stock symbols, ensuring each symbol is part of a
    NYSE or NASDAQ exchange.
    """
    # Was `requests.get_json(...)` — requests has no such function; fetch the
    # resource with get() and decode the JSON body explicitly.
    resp = requests.get(ST_BASE_URL + 'trending/symbols.json',
                        params=ST_BASE_PARAMS)
    trending = resp.json()['symbols']
    return [s['symbol'] for s in trending if s['exchange'] in EXCHANGES]
def update():
    """Update a user row with the fields supplied in the request JSON.

    Returns the standard envelope with HTTP 400 on bad input, 500 on a DB
    error, 200 on success.
    """
    ret = {
        'object': 'user',
        'action': 'update',
        'server_time': datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
        'payload': []
    }
    # Was `requests.get_json()` — must read the Flask request body.
    data = request.get_json()
    if data is None:
        return jsonify(ret), 400
    try:
        uid = data['uid']
    except KeyError as err:
        print(err)
        return jsonify(ret), 400
    # Collect only the updatable keys actually present in the payload.
    key = []
    value = []
    for k in user_key:
        try:
            value.append(data[k])
            key.append(k)
        except KeyError:
            pass
    # Parameterize uid as well — the original concatenated it into the SQL
    # string, which is an SQL-injection vector.
    sql = 'UPDATE user SET ' + '=%s, '.join(key) + '=%s WHERE uid = %s'
    try:
        db_query(sql, tuple(value) + (uid,))
        return jsonify(ret), 200
    except MySQLdb.Error as err:
        print(err)
        return jsonify(ret), 500
def receive_block():
    """Validate a block received from a peer and append it to the chain.

    Accepts the block only when its index, previous-hash and proof all
    check out against our last block.
    """
    # Was `requests.get_json()` — must read the Flask request body.
    values = request.get_json()
    new_block = values['block']
    old_block = blockchain.last_block
    if new_block['index'] == old_block['index'] + 1:  # was `oldblock`: NameError
        # index is correct
        if new_block['previous_hash'] == blockchain.hash(old_block):
            # hashes are correct
            block_string = json.dumps(old_block, sort_keys=True).encode()
            if blockchain.valid_proof(block_string, new_block['proof']):
                # proof is valid
                blockchain.add_block(new_block)
                return "Block accepted"
            else:
                # bad proof, handle case
                pass
        else:
            # hashes don't match, handle error
            pass
    else:
        # their index is more than one greater: the block could be invalid,
        # or we could be behind — a consensus round (poll all nodes, adopt
        # the longest chain) would resolve this.  Not implemented yet.
        pass
def add_transaction():
    """Add a transaction to the blockchain's pending list.

    Returns 400 when any required key is missing, otherwise 201 with the
    index of the block the transaction will join.
    """
    # Was `requests.get_json()`; also renamed the local so it no longer
    # shadows the `json` module.
    payload = request.get_json()
    transaction_keys = ['sender', 'receiver', 'amount']
    if not all(key in payload for key in transaction_keys):
        return 'Bad JSON file', 400
    index = blockchain.add_transaction(payload['sender'], payload['receiver'],
                                       payload['amount'])
    # Original string lacked the f-prefix, so "{index}" was returned verbatim
    # (and misspelled "addes").
    response = {'message': f'This transaction will be added to block {index}'}
    # Was `jsonify(response, 201)` — the 201 was passed into jsonify instead
    # of being returned as the HTTP status code.
    return jsonify(response), 201
def get_watched_stocks(wl_id):
    """Get list of symbols being watched by specified StockTwits watchlist."""
    # Was `requests.get_json(...)` — no such function; fetch then decode.
    resp = requests.get(ST_BASE_URL + 'watchlists/show/{}.json'.format(wl_id),
                        params=ST_BASE_PARAMS)
    wl = resp.json()['watchlist']['symbols']
    return [s['symbol'] for s in wl]
def add_transaction():
    """Add a transaction to the blockchain's pending list.

    Returns 400 when any required key is missing, otherwise 201 with the
    index of the block the transaction will join.
    """
    # Was `requests.get_json()`; renamed the local so the `json` module is
    # not shadowed.
    payload = request.get_json()
    transaction_keys = ['sender', 'receiver', 'amount']
    if not all(key in payload for key in transaction_keys):
        return 'Some key is missing', 400
    index = blockchain.add_transaction(payload['sender'], payload['receiver'],
                                       payload['amount'])
    response = {'message': f'This transaction will be added to Block{index}'}
    return jsonify(response), 201
def add_transaction():
    """Add a transaction to the blockchain's pending list.

    Returns 400 when any required key is missing, otherwise 201 with the
    index of the block the transaction will join.
    """
    # Was `requests.get_json()`; renamed the local so the `json` module is
    # not shadowed.  Note: 'reciever' (sic) is the wire key callers send —
    # kept for compatibility.
    payload = request.get_json()
    transaction_keys = ['sender', 'reciever', 'amount']
    if not all(key in payload for key in transaction_keys):
        return 'bad request', 400
    index = blockchain.add_transaction(payload['sender'], payload['reciever'],
                                       payload['amount'])
    res = {'message': f'your transaction will be added in block {index}'}
    return jsonify(res), 201
def add_village():
    """Add a new village record to the database.

    Reads the village fields from the request JSON body and inserts them
    into ``db2_handler``.  Aborts with 400 on a malformed/missing body.
    Returns 1 on successful insert, 0 on DB failure (original contract kept).
    """
    try:
        # Was a mix of `request.get_json()` and the broken
        # `requests.get_json()`; parse the body once and read every field
        # from that single payload.
        payload = request.get_json()
        for field in ("village_name", "village_pos_lat", "village_pos_lon",
                      "village_state", "village_score", "village_govt_auth",
                      "village_govt_auth_contact", "village_local_auth",
                      "village_local_auth_contact"):
            villages_db_data[field] = payload[field]
    except Exception:
        # Missing field or non-JSON body -> bad request.
        abort(400)
    # Insert the collected data directly into the villages collection.
    try:
        db2_handler.insert_one(villages_db_data)
        return 1
    except Exception:
        return 0
def connect_node():
    """Register the nodes listed in the request body with the blockchain.

    Returns 400 when no 'nodes' key is supplied, otherwise 201 with the
    full node list.
    """
    # Was `requests.get_json()`; renamed the local so the `json` module is
    # not shadowed.
    payload = request.get_json()
    nodes = payload.get('nodes')
    if nodes is None:
        return "No node", 400
    for node in nodes:
        blockchain.add_node(node)
    response = {'message': "Success", "total_nodes": list(blockchain.nodes)}
    return jsonify(response), 201
def get_stock_stream(symbol, params=None):
    """Get the stream of messages for the given symbol.

    `params` (optional, backward-compatible addition) holds extra query
    parameters merged over ST_BASE_PARAMS — the original body referenced an
    undefined `params` name and would always raise NameError.
    """
    all_params = ST_BASE_PARAMS.copy()
    # `.iteritems()` is Python 2 only; use .items() on a guaranteed dict.
    for k, v in (params or {}).items():
        all_params[k] = v
    # Was `requests.get_json(...)` — fetch then decode the JSON body.
    resp = requests.get(ST_BASE_URL + 'streams/symbol/{}.json'.format(symbol),
                        params=all_params)
    return resp.json()
def webhook():
    """Handle an incoming webhook: build a JSON response from the request.

    Parses the POSTed body (forced, errors silenced), delegates to
    makeResponse(), and returns the result with an explicit JSON
    content type.
    """
    # Was `requests.get_json(...)` — must read the Flask request body.
    req = request.get_json(silent=True, force=True)
    print(json.dumps(req, indent=4))
    res = makeResponse(req)
    res = json.dumps(res, indent=4)
    r = make_response(res)
    r.headers['Content-Type'] = 'application/json'
    return r
def get_message_stream(wl_id, params=None):
    """Get up to 30 messages from Watchlist (wl_id) according to additional
    params.

    `params` defaults to None instead of a shared mutable `{}` (classic
    mutable-default pitfall); passing a dict behaves exactly as before.
    """
    all_params = ST_BASE_PARAMS.copy()
    # `.iteritems()` is Python 2 only.
    for k, v in (params or {}).items():
        all_params[k] = v
    # Was `requests.get_json(...)` — fetch then decode the JSON body.
    resp = requests.get(ST_BASE_URL + 'streams/watchlist/{}.json'.format(wl_id),
                        params=all_params)
    return resp.json()
def get_complaints():
    """Return all complaints matching the tag supplied in the request body.

    Aborts with 400 when the body is missing or lacks the tag.
    """
    try:
        # Was `requests.get_json("complaint_resource_tag")` — requests has no
        # get_json(); read the tag out of the Flask request's JSON body.
        complaint_resource_tag = request.get_json()["complaint_resource_tag"]
    except Exception:
        abort(400)
    # Query the complaints collection, excluding Mongo's _id field.
    data_retrieved = db4_handler.find(
        {"complaint_resource_tag": complaint_resource_tag}, {"_id": 0})
    return json.dumps(list(data_retrieved))
def api_create_grid():
    """API call: build a DxD word grid from JSON parameters and return the
    words placed into it.

    Expects word1..word3, dimension, density and probabilities p1..p3 in
    the request body.
    """
    # Was `requests.get_json()` — must read the Flask request body.
    params = request.get_json()
    word1 = params['word1']
    word2 = params['word2']
    word3 = params['word3']
    D = params['dimension']
    x = params['density']
    P1 = params['p1']
    P2 = params['p2']
    P3 = params['p3']
    N = D * D
    words = [word1, word2, word3]
    P = [P1, P2, P3]
    # create grid DxD
    grid = WordGrid.Grid(D)
    my_grid = WordGrid.grid.create_grid()
    # TODO: figure out how to store this grid for searches in subsequent
    # API calls below.

    def create_dict(words):
        # Map each word to its length; lengths double as sampling weights.
        word_dict = {}
        for word in words:
            word_dict[word] = len(word)
        weights = list(word_dict.values())
        return word_dict, weights

    word_dict, weights = create_dict(words)
    # Sample a list of words equal to density x%.
    # n is the size of random samples created to select the x% subset,
    # default set at 50.
    words_for_grid = word_list.get_words(word_dict=word_dict, weights=weights,
                                         n=50, N=N, x=x)
    words_out = ' '.join(words_for_grid)
    # Enter words in grid with probability vector P.
    for word in words:
        grid.add_words_multi(my_grid, word, words_for_grid, P)
    grid.fill_up_grid(my_grid)
    # Return the list of words that were entered in the grid.
    return jsonify(words_out)
def api_word_search():
    """Search the previously stored grid for every word named in the
    request body's keys and return the per-word results.
    """
    # Was `requests.get_json()` — must read the Flask request body.
    findwords = request.get_json().keys()
    results = [grid.word_search(my_grid, word) for word in findwords]
    return jsonify(results)
def new_transaction():
    """Create a new blockchain transaction from the request body.

    Returns 412 (precondition failed) when a required key is missing,
    otherwise 201 with the target block index.
    """
    # Was `requests.get_json()` — must read the Flask request body.
    values = request.get_json()
    required = ['sender', 'recipient', 'amount']
    if not all(k in values for k in required):
        return 'missing values', 412  # precondition failed
    index = blockchain.new_transaction(values['sender'], values['recipient'],
                                       values['amount'])
    response = {'message': f'transaction will be added to block {index}'}
    return jsonify(response), 201  # fulfilled and something created
def register_nodes():
    """Register every node listed in the request body with the blockchain.

    Returns 400 when no 'nodes' key is supplied.
    """
    # Was `requests.get_json()` — must read the Flask request body.
    values = request.get_json()
    nodes = values.get('nodes')
    if nodes is None:
        return "need nodes", 400
    for node in nodes:
        blockchain.register_node(node)
    response = {
        'message': 'new nodes added',
        'total_nodes': list(blockchain.nodes)
    }
    return jsonify(response)
def connect_node():
    """Connect a node to the blockchain network.

    Returns 400 when no 'nodes' key is supplied, otherwise 201 with the
    full node list.
    """
    # Was `requests.get_json()` — must read the Flask request body.
    json_file = request.get_json()
    nodes = json_file.get('nodes')
    if nodes is None:
        return "no nodes", 400
    for node in nodes:
        blockchain.add_node(node)
    response = {
        'message': 'All nodes connected',
        'total_nodes': list(blockchain.nodes)
    }
    return jsonify(response), 201
def login():
    """Look up a Firebase user by the email in the request body.

    Always responds 200; `uid` is empty and `message` explains the failure
    when the user does not exist.
    """
    # Was `requests.get_json(...)` — must read the Flask request body.
    data = request.get_json(force=True)
    emailOfUser = data.get("email")
    uid = ""
    message = ""
    try:
        user = auth.get_user_by_email(emailOfUser)
        message = "Succesfully Got The new user"
        uid = user.uid
    except Exception:
        # Was assigned to `messge` (typo), so the failure message was lost.
        message = "User not there in firebase"
    return {"uid": uid, "message": message, "Response": 200}
    # (Removed the unreachable `return jsonify(...)` that followed.)
def add_transaction():
    """Add a transaction to a mined block.

    Returns 400 when any required key is missing, otherwise 201 with the
    target block index.  Note: 'reciever' (sic) is the wire key callers
    send — kept for compatibility.
    """
    # Was `requests.get_json()` — must read the Flask request body.
    json_file = request.get_json()
    transaction_keys = ['sender', 'reciever', 'amount']
    if not all(key in json_file for key in transaction_keys):
        return 'Some elements are missing', 400
    idx = blockchain.add_transaction(
        json_file['sender'],
        json_file['reciever'],
        json_file['amount'],
    )
    # Was `{index}` — an undefined name (the variable is `idx`), which
    # raised NameError at runtime.
    response = {'message': f'This transaction added to block {idx}'}
    return jsonify(response), 201
def register_nodes():
    """Register every node listed in the request body with the blockchain.

    Returns 400 when no 'nodes' key is supplied, otherwise 201 with the
    full node list.
    """
    # Was `requests.get_json()` — must read the Flask request body.
    values = request.get_json()
    # Was `Values.get('nodes')` — capitalized name does not exist (NameError).
    nodes = values.get('nodes')
    if nodes is None:
        return "Error: Please supply a valid list of nodes", 400
    for node in nodes:
        blockchain.register_node(node)
    response = {
        'message': 'New noded have been added',
        'total_nodes': list(blockchain.nodes),
    }
    return jsonify(response), 201
def clean_watchlist(wl_id):
    """Delete watched stocks that aren't part of NASDAQ or NYSE.

    Returns the number of symbols successfully removed.
    """
    # Was `requests.get_json(...)` — fetch then decode the JSON body.
    resp = requests.get(ST_BASE_URL + 'watchlists/show/{}.json'.format(wl_id),
                        params=ST_BASE_PARAMS)
    wl = resp.json()['watchlist']['symbols']
    qty_deleted = 0
    for sym in wl:
        if sym['exchange'] not in EXCHANGES:
            log.info("Removing {}".format(sym))
            if delete_from_watchlist(sym['symbol'], wl_id=wl_id):
                qty_deleted += 1
            else:
                log.error(
                    "Error deleting symbol from watchlist: {}".format(sym))
    return qty_deleted
def signup():
    """Create a Firebase user from the email/password in the request body.

    Always responds 200; `uid` is empty and `message` explains the failure
    when the user already exists.
    """
    # Was `requests.get_json(...)` — must read the Flask request body.
    data = request.get_json(force=True)
    emailOfUser = data.get("email")
    passwordOfUser = data.get("password")
    uid = ""
    message = ""
    try:
        user = auth.create_user(
            email=emailOfUser,
            email_verified=False,
            password=passwordOfUser)
        message = "Successfully created new user"
        uid = user.uid
    except Exception:
        message = "User already there"
    return {"uid": uid, "message": message, "Response": 200}
    # (Removed the unreachable `return jsonify(...)` that followed.)
def get_new_data(self, now):
    """Append one new price row per configured ticker and recompute
    SMA/RSI/MACD indicator columns on self.data.

    Returns self.data; sets self.is_trading_locked and returns early when
    a price fetch fails.
    """
    new_row = {}
    self.is_trading_locked = False
    new_row['timestamp'] = now.strftime("%Y-%m-%d %H:%M")
    # Calculate moving averages and RSI values
    for a_kraken_ticker, a_robinhood_ticker in config['ticker_list'].items():
        if not config['debug_enabled']:
            try:
                # NOTE(review): `get_json(...).json()` — presumably get_json
                # is an alias for requests.get; confirm against the imports.
                result = get_json('https://api.kraken.com/0/public/Ticker?pair=' + str(a_kraken_ticker)).json()
                if len(result['error']) == 0:
                    new_row[a_robinhood_ticker] = round(float(result['result'][a_kraken_ticker]['a'][0]), 3)
            except Exception:  # was a bare except: don't swallow SystemExit/KeyboardInterrupt
                print('An exception occurred retrieving prices.')
                self.is_trading_locked = True
                return self.data
        else:
            new_row[a_robinhood_ticker] = round(float(randint(10, 100)), 3)
        self.data = self.data.append(new_row, ignore_index=True)
        # If the Kraken API is overloaded, they freeze the values it returns
        if (self.data.tail(4)[a_robinhood_ticker].to_numpy()[-1] == self.data.tail(4)[a_robinhood_ticker].to_numpy()).all():
            print('Repeating values detected for ' + str(a_robinhood_ticker) + '. Ignoring data point.')
            self.data = self.data[:-1]
        elif self.data.shape[0] > 0:
            self.data[a_robinhood_ticker + '_SMA_F'] = self.data[a_robinhood_ticker].shift(1).rolling(window=config['moving_average_periods']['sma_fast']).mean()
            self.data[a_robinhood_ticker + '_SMA_S'] = self.data[a_robinhood_ticker].shift(1).rolling(window=config['moving_average_periods']['sma_slow']).mean()
            self.data[a_robinhood_ticker + '_RSI'] = RSI(self.data[a_robinhood_ticker].values, timeperiod=config['rsi_period'])
            self.data[a_robinhood_ticker + '_MACD'], self.data[a_robinhood_ticker + '_MACD_S'], macd_hist = MACD(
                self.data[a_robinhood_ticker].values,
                fastperiod=config['moving_average_periods']['macd_fast'],
                slowperiod=config['moving_average_periods']['macd_slow'],
                signalperiod=config['moving_average_periods']['macd_signal'])
            if config['save_charts'] == True:
                # Renamed from `slice`, which shadowed the builtin.
                chart_slice = self.data[[a_robinhood_ticker, str(a_robinhood_ticker) + '_SMA_F', str(a_robinhood_ticker) + '_SMA_S']]
                fig = chart_slice.plot.line().get_figure()
                fig.savefig('chart-' + str(a_robinhood_ticker).lower() + '-sma.png', dpi=300)
                plt.close(fig)
    return self.data
def init_data(self):
    """Build a fresh self.data DataFrame from Kraken OHLC history and
    compute SMA/EMA/RSI/MACD indicator columns per ticker.

    Returns False (after bumping api_error_counter) when any download
    fails; otherwise leaves self.data populated.
    """
    print('Starting with a fresh dataset.')
    # Download historical data from Kraken
    column_names = ['timestamp']
    for a_robinhood_ticker in config['ticker_list'].values():
        column_names.append(a_robinhood_ticker)
    self.data = pd.DataFrame(columns=column_names)
    for a_kraken_ticker, a_robinhood_ticker in config['ticker_list'].items():
        try:
            # NOTE(review): `get_json(...).json()` — presumably get_json is an
            # alias for requests.get; confirm against the imports.
            result = get_json('https://api.kraken.com/0/public/OHLC?interval=' + str(config['bot']['minutes_between_updates']) + '&pair=' + a_kraken_ticker).json()
            historical_data = pd.DataFrame(result['result'][a_kraken_ticker])
            historical_data = historical_data[[0, 1]]
            self.api_error_counter = 0
            # Be nice to the Kraken API
            sleep(3)
        except Exception:  # was a bare except: don't swallow SystemExit/KeyboardInterrupt
            print('An exception occurred retrieving historical data from Kraken.')
            self.api_error_counter = self.api_error_counter + 1
            return False
        # Convert timestamps
        self.data['timestamp'] = [pd.Timestamp(datetime.fromtimestamp(x).strftime("%Y-%m-%d %H:%M")) for x in historical_data[0]]
        # Copy the data
        self.data[a_robinhood_ticker] = [round(float(x), 3) for x in historical_data[1]]
        # Calculate the indicators
        self.data[a_robinhood_ticker + '_SMA_F'] = self.data[a_robinhood_ticker].shift(1).rolling(window=config['ta']['moving_average_periods']['sma_fast']).mean()
        self.data[a_robinhood_ticker + '_SMA_S'] = self.data[a_robinhood_ticker].shift(1).rolling(window=config['ta']['moving_average_periods']['sma_slow']).mean()
        self.data[a_robinhood_ticker + '_EMA_F'] = self.data[a_robinhood_ticker].ewm(span=config['ta']['moving_average_periods']['ema_fast'], adjust=False, min_periods=config['ta']['moving_average_periods']['ema_fast']).mean()
        self.data[a_robinhood_ticker + '_EMA_S'] = self.data[a_robinhood_ticker].ewm(span=config['ta']['moving_average_periods']['ema_slow'], adjust=False, min_periods=config['ta']['moving_average_periods']['ema_slow']).mean()
        self.data[a_robinhood_ticker + '_RSI'] = RSI(self.data[a_robinhood_ticker].values, timeperiod=config['ta']['rsi_period'])
        self.data[a_robinhood_ticker + '_MACD'], self.data[a_robinhood_ticker + '_MACD_S'], macd_hist = MACD(
            self.data[a_robinhood_ticker].values,
            fastperiod=config['ta']['moving_average_periods']['macd_fast'],
            slowperiod=config['ta']['moving_average_periods']['macd_slow'],
            signalperiod=config['ta']['moving_average_periods']['macd_signal'])
def get_discount(user_id):
    """Look up a payment and return its applied discount.

    Requires an Authorization header; reads payment_id/order_id/user_id
    from the request JSON body.
    """
    headers = request.headers
    # check header here
    if 'Authorization' not in headers:
        return Response(json.dumps({"error": "missing auth"}),
                        status=401, mimetype='application/json')
    try:
        # Was `requests.get_json()` — must read the Flask request body.
        content = request.get_json()
        payment_id = content["payment_id"]
        order_id = content["order_id"]
        # Was `content["payment_id"]` — copy-paste bug that clobbered
        # user_id with the payment id.
        user_id = content["user_id"]
    except Exception:
        return json.dumps({"message": "error reading arguments"})
    payload = {"objtype": "payment", "objkey": payment_id}
    url = db['name'] + '/' + db['endpoint'][0]
    response = requests.get(url, params=payload)
    # Response objects are not subscriptable; decode the JSON body first.
    dis_json = {"discount_applied": response.json()['discount_applied']}
    return dis_json
def send_command():
    """Forward the 'command' from the request body to the pod communicator.

    NOTE(review): the `return` inside `finally` swallows any exception
    raised by send_command() and always reports ok — preserved as the
    original (possibly intentional best-effort) behavior.
    """
    # Was `requests.get_json()` — must read the Flask request body.
    command = request.get_json()['command']
    try:
        pod_communicator.send_command(command)
    finally:
        return jsonify({'status': 'ok'})
def get_new_data(self, now):
    """Append one new price row per configured ticker, sanity-check it,
    and recompute SMA/EMA/RSI/MACD indicator columns on self.data.

    Returns True on success, False when a fetch fails or a price jump
    looks like bad data.
    """
    # If the current dataset has gaps in it, we refresh it from Kraken
    if self.data_has_gaps(now):
        self.init_data()
    new_row = {'timestamp': now.strftime("%Y-%m-%d %H:%M")}
    # Calculate moving averages and RSI values
    for a_kraken_ticker, a_robinhood_ticker in config['ticker_list'].items():
        if not config['simulate_api_calls']:
            try:
                # NOTE(review): `get_json(...).json()` — presumably get_json
                # is an alias for requests.get; confirm against the imports.
                result = get_json('https://api.kraken.com/0/public/Ticker?pair=' + str(a_kraken_ticker)).json()
                if len(result['error']) == 0:
                    new_row[a_robinhood_ticker] = round(float(result['result'][a_kraken_ticker]['a'][0]), 3)
            except Exception:  # was a bare except: don't swallow SystemExit/KeyboardInterrupt
                print('An exception occurred retrieving prices.')
                return False
        else:
            new_row[a_robinhood_ticker] = round(float(randint(400000, 500000)), 3)
        # If the new price differs more than 30% from the previous reading,
        # assume it's an error somewhere (comment said 40%, code checks 30).
        percent_diff = (abs(new_row[a_robinhood_ticker] - self.data.iloc[-1][a_robinhood_ticker]) / self.data.iloc[-1][a_robinhood_ticker]) * 100
        if percent_diff > 30:
            print('Error: new price ($' + str(new_row[a_robinhood_ticker]) + ') differs ' + str(round(percent_diff, 2)) + '% from previous value, ignoring.')
            return False
        self.data = self.data.append(new_row, ignore_index=True)
        # If the Kraken API is overloaded, they freeze the values it returns
        if (self.data.tail(4)[a_robinhood_ticker].to_numpy()[-1] == self.data.tail(4)[a_robinhood_ticker].to_numpy()).all():
            print('Repeating values detected for ' + str(a_robinhood_ticker) + '. Ignoring data point.')
            self.data = self.data[:-1]
        elif self.data.shape[0] > 0:
            self.data[a_robinhood_ticker + '_SMA_F'] = self.data[a_robinhood_ticker].rolling(window=config['moving_average_periods']['sma_fast']).mean()
            # Original computed _SMA_S twice back-to-back; once suffices.
            self.data[a_robinhood_ticker + '_SMA_S'] = self.data[a_robinhood_ticker].rolling(window=config['moving_average_periods']['sma_slow']).mean()
            self.data[a_robinhood_ticker + '_EMA_F'] = self.data[a_robinhood_ticker].ewm(span=config['moving_average_periods']['ema_fast'], adjust=False, min_periods=config['moving_average_periods']['ema_fast']).mean()
            self.data[a_robinhood_ticker + '_EMA_S'] = self.data[a_robinhood_ticker].ewm(span=config['moving_average_periods']['ema_slow'], adjust=False, min_periods=config['moving_average_periods']['ema_slow']).mean()
            self.data[a_robinhood_ticker + '_RSI'] = RSI(self.data[a_robinhood_ticker].values, timeperiod=config['rsi_period'])
            self.data[a_robinhood_ticker + '_MACD'], self.data[a_robinhood_ticker + '_MACD_S'], macd_hist = MACD(
                self.data[a_robinhood_ticker].values,
                fastperiod=config['moving_average_periods']['macd_fast'],
                slowperiod=config['moving_average_periods']['macd_slow'],
                signalperiod=config['moving_average_periods']['macd_signal'])
            if config['save_charts'] == True:
                self.save_chart([a_robinhood_ticker, str(a_robinhood_ticker) + '_SMA_F', str(a_robinhood_ticker) + '_SMA_S'], str(a_robinhood_ticker) + '_sma')
                self.save_chart([a_robinhood_ticker, str(a_robinhood_ticker) + '_EMA_F', str(a_robinhood_ticker) + '_EMA_S'], str(a_robinhood_ticker) + '_ema')
                self.save_chart_rescale([a_robinhood_ticker, str(a_robinhood_ticker) + '_RSI'], str(a_robinhood_ticker) + '_rsi')
                self.save_chart_rescale([a_robinhood_ticker, str(a_robinhood_ticker) + '_MACD', str(a_robinhood_ticker) + '_MACD_S'], str(a_robinhood_ticker) + '_macd')
    return True
def get_Updates(token=get_token()):
    """Fetch pending updates from the Telegram Bot API.

    Returns the HTTP response object (original contract preserved).

    NOTE(review): the default `token=get_token()` is evaluated once at
    definition time, not per call — confirm that is intended.
    """
    bot_api = "https://api.telegram.org/bot{0}/{1}".format(token, "getUpdates")
    # Was `requests.get_json(bot_api)` — requests has no get_json(); use
    # get().  (Original comment, translated: "Changed from get to get_json,
    # need to verify" — get() is the correct call.)
    response = requests.get(bot_api)
    # Decoded payload; the original bound it to `s` with a note ("not needed
    # if the variant above works") and still returned the raw response.
    s = response.json()
    return response