def getrecentclassab():
    """Return the 50 most recent Omni class A/B transactions as a JSON response.

    Results are cached per chain-tip block for 1 hour; on a cache hit the
    stored response is returned unchanged.
    """
    rev = raw_revision()
    cblock = rev['last_block']
    ckey = "data:tx:recentab:" + str(cblock)
    try:
        # cache hit: reuse the response previously built for this tip block
        response = json.loads(lGet(ckey))
        print_debug(("cache looked success", ckey), 7)
    except:
        print_debug(("cache looked failed", ckey), 7)
        limit = 50
        ROWS = dbSelect("select txj.txdata from txjson txj,transactions tx where tx.txdbserialnum = txj.txdbserialnum and txj.protocol = 'Omni' and (tx.txclass = 1 or tx.txclass = 2) and txj.txdbserialnum > 0 order by txj.txdbserialnum DESC limit %s;", [limit])
        data = []
        pnl = getpropnamelist()
        if len(ROWS) > 0:
            for d in ROWS:
                res = addName(d[0], pnl)
                try:
                    res['confirmations'] = cblock - res['block'] + 1
                except:
                    pass
                #if cblock hasn't caught up make sure we don't return negative weirdness
                # (fix: guard the key -- it is absent when the tx has no 'block'
                # field and the assignment above failed, which used to KeyError)
                if 'confirmations' in res and res['confirmations'] < 0:
                    res['confirmations'] = 0
                data.append(res)
        response = {'note': 'Endpoint returns 50 most recent txs only', 'transactions': data}
        #cache pages for 1 hour
        lSet(ckey, json.dumps(response))
        lExpire(ckey, 3600)
        cachetxs(data)
    return jsonify(response)
def getblocktxjson(block):
    # Return all non-Bitcoin (Omni-layer) transactions in the given block,
    # decorated with property names.  Cached per block for 30 minutes.
    # NOTE(review): this definition is shadowed by a later, extended
    # getblocktxjson in this file -- confirm whether it is dead code.
    bhash=getblockhash(block)
    if "error" in bhash:
        # invalid/unknown block: propagate the error object from getblockhash
        return bhash
    ckey="data:block:txjson:"+str(block)
    try:
        # cache hit: reuse the previously-built response
        response=json.loads(lGet(ckey))
        print_debug(("cache looked success",ckey),7)
    except:
        print_debug(("cache looked failed",ckey),7)
        try:
            block_ = int( block ) #check numeric
            ROWS=dbSelect("select txj.txdata from transactions t, txjson txj where t.txdbserialnum = txj.txdbserialnum and t.protocol != 'Bitcoin' and t.txblocknumber=%s", [block_])
        except Exception as e:
            return {'error':'This endpoint only consumes valid input. Invalid block'}
        pnl=getpropnamelist()
        ret=[]
        for x in ROWS:
            try:
                # txdata may come back as a JSON string or an already-decoded dict
                txJson = json.loads(x[0])
            except TypeError:
                txJson = x[0]
            ret.append(addName(txJson,pnl))
        response = {"block":block_, "blockhash":bhash, "transactions": ret}
        #cache for 30 min
        lSet(ckey,json.dumps(response))
        lExpire(ckey,1800)
    return response
def getrawpending():
    """Return all pending (unconfirmed) Omni transactions.

    Returns ``{'data': [tx, ...], 'index': {referenceaddress: [tx, ...]}}``;
    the result is cached per chain-tip block for 5 minutes.
    """
    rev = raw_revision()
    cblock = rev['last_block']
    ckey = "data:tx:pendinglist:" + str(cblock)
    try:
        response = json.loads(lGet(ckey))
        print_debug(("cache looked success", ckey), 7)
    except:
        print_debug(("cache looked failed", ckey), 7)
        # pending transactions are stored with negative serial numbers
        ROWS = dbSelect(
            "select txj.txdata, extract(epoch from tx.txrecvtime) from txjson txj,transactions tx where tx.txdbserialnum=txj.txdbserialnum and txj.protocol = 'Omni' and txj.txdbserialnum < 0 order by txj.txdbserialnum ASC;"
        )
        data = []
        index = {}
        pnl = getpropnamelist()
        if len(ROWS) > 0:
            for d in ROWS:
                res = addName(d[0], pnl)
                if 'blocktime' not in res:
                    try:
                        # unconfirmed txs have no blocktime; use the receive time
                        res['blocktime'] = int(d[1])
                    except:
                        pass
                data.append(res)
                # index by receiving address if it exists.  Fix: some tx types
                # carry no 'referenceaddress'; the bare assignment in the except
                # handler used to re-raise KeyError and abort the whole build.
                try:
                    index[res['referenceaddress']].append(res)
                except:
                    try:
                        index[res['referenceaddress']] = [res]
                    except:
                        pass
        response = {'data': data, 'index': index}
        #cache for 5 min
        lSet(ckey, json.dumps(response))
        lExpire(ckey, 300)
    return response
def getaddresshistraw(address,page):
    # Paginated (10 per page, newest first) confirmed Omni transaction
    # history for an address.  Page contents cached for 7 minutes;
    # confirmation counts are refreshed against the current tip per call.
    rev=raw_revision()
    cblock=rev['last_block']
    try:
        page=int(page)
    except:
        page=1
    atc=getaddresstxcount(address)
    pcount=atc['pages']
    txcount=atc['txcount']
    # convert the 1-based page number to a 0-based index, clamped to range
    adjpage=page
    adjpage-=1
    if adjpage<0:
        adjpage=0
    if adjpage>pcount:
        adjpage=pcount
    #toadd=[]
    limit=10
    offset=adjpage*10
    ckey="data:addrhist:"+str(address)+":"+str(adjpage)
    try:
        #check cache
        txlist = json.loads(lGet(ckey))
        print_debug(("cache looked success",ckey),7)
    except:
        print_debug(("cache looked failed",ckey),7)
        ROWS=[]
        if limit > 0:
            #ROWS=dbSelect("select txj.txdata from txjson txj, (select distinct txdbserialnum from addressesintxs where address=%s and txdbserialnum > 0) q where q.txdbserialnum=txj.txdbserialnum order by txj.txdbserialnum desc limit %s offset %s",(address,limit,offset))
            #ROWS=dbSelect("select txdata from txjson where (txdata->>'sendingaddress'=%s or txdata->>'referenceaddress'=%s) and txdbserialnum > 0 order by txdbserialnum desc limit %s offset %s",(address,address,limit,offset))
            ROWS=dbSelect("with temp as (select distinct(txdbserialnum) as txdbserialnum from addressesintxs where address=%s and txdbserialnum > 0 order by txdbserialnum desc limit %s offset %s) select txj.txdata from txjson txj, temp where txj.txdbserialnum=temp.txdbserialnum",(address,limit,offset))
        #set and cache data for 7 min
        pnl=getpropnamelist()
        txlist=[]
        for r in ROWS:
            txJson=addName(r[0],pnl)
            txlist.append(txJson)
        #txlist = toadd+txlist
        lSet(ckey,json.dumps(txlist))
        lExpire(ckey,420)
    # recompute confirmations against the current tip (cached entries too)
    for tx in txlist:
        try:
            tx['confirmations'] = cblock - tx['block'] + 1
        except:
            pass
    cachetxs(txlist)
    response = { 'address': address, 'transactions': txlist , 'pages': pcount, 'current_page': page , 'txcount': txcount }
    return response
def getblocktxjson(block):
    """Return all Omni-layer transactions in the given block.

    The response carries the block number, block hash, transaction list
    (with property names added) and a count.  It is cached per block for
    6 hours; confirmation counts are recomputed against the current chain
    tip on every call, including cache hits.
    """
    bhash = getblockhash(block)
    if "error" in bhash:
        # invalid/unknown block: propagate the error object from getblockhash
        return bhash
    rev = raw_revision()
    cblock = rev['last_block']
    ckey = "data:block:txjson:" + str(block)
    try:
        response = json.loads(lGet(ckey))
        print_debug(("cache looked success", ckey), 7)
    except:
        print_debug(("cache looked failed", ckey), 7)
        try:
            block_ = int(block)  #check numeric
            ROWS = dbSelect(
                "select txj.txdata from transactions t, txjson txj where t.txdbserialnum = txj.txdbserialnum and t.protocol != 'Bitcoin' and t.txblocknumber=%s",
                [block_])
        except Exception as e:
            return {'error': 'This endpoint only consumes valid input. Invalid block'}
        pnl = getpropnamelist()
        ret = []
        for x in ROWS:
            try:
                # txdata may be a JSON string or an already-decoded dict
                txJson = json.loads(x[0])
            except TypeError:
                txJson = x[0]
            ret.append(addName(txJson, pnl))
        response = {"block": block_, "blockhash": bhash, "transactions": ret, "count": len(ret)}
        #cache for 6 hours
        lSet(ckey, json.dumps(response))
        lExpire(ckey, 21600)
    for res in response['transactions']:
        try:
            res['confirmations'] = cblock - res['block'] + 1
        except:
            pass
        #if cblock hasn't caught up make sure we don't return negative weirdness
        # (fix: guard the key -- it is absent when the tx has no 'block' field
        # and the assignment above failed, which used to KeyError)
        if 'confirmations' in res and res['confirmations'] < 0:
            res['confirmations'] = 0
    return response
def getblockslistraw(lastblock=0):
    # Return the 10 blocks at or below `lastblock` (default: chain tip),
    # each with its timestamp, Omni tx count, hash and value summary.
    # Cached per starting block for 6 hours; 'latest' is always refreshed.
    try:
        block = int(lastblock)
    except:
        block = 0
    rev = raw_revision()
    cblock = rev['last_block']
    if block < 1 or block > cblock:
        # out-of-range or unset request: start from the current tip
        block = cblock
    ckey = "data:tx:blocks:" + str(block)
    try:
        response = json.loads(lGet(ckey))
        print_debug(("cache looked success", ckey), 7)
    except:
        print_debug(("cache looked failed", ckey), 7)
        ROWS = dbSelect(
            "select t.blocknumber,extract(epoch from t.blocktime),t.blockcount,b.blockhash,t.value from txstats t, blocks b where t.blocknumber=b.blocknumber and t.blocknumber <= %s order by t.blocknumber desc limit 10;",
            [block])
        response = {'latest': cblock, 'blocks': []}
        pnl = getpropnamelist()
        for r in ROWS:
            bnum = r[0]
            try:
                # probe: value is usable as-is only if it is already a dict
                value = r[4]
                q = value['total_usd']
            except:
                try:
                    # otherwise it is a JSON string that needs decoding
                    value = json.loads(r[4])
                except:
                    value = {'error': True, 'msg': 'calculations missing'}
            try:
                # decorate per-property detail entries with names/flags
                for pid in value['details']:
                    value['details'][pid]['name'] = pnl[str(pid)]['name']
                    value['details'][pid]['flags'] = pnl[str(pid)]['flags']
            except:
                pass
            ret = {
                'block': bnum,
                'timestamp': r[1],
                'omni_tx_count': r[2],
                'block_hash': r[3],
                'value': value
            }
            response['blocks'].append(ret)
        #cache block list for 6 hours
        lSet(ckey, json.dumps(response))
        lExpire(ckey, 21600)
    # always report the current tip, even on a cached response
    response['latest'] = cblock
    return response
def getrawpending(addr=None):
    """Return pending (unconfirmed) Omni transactions, optionally for one address.

    The result is ``{'data': [...], 'index': {address: [tx, ...]}}`` where the
    index covers sending addresses and, when present, receiving addresses.
    Cached per chain-tip block (and per address, if given) for 1 minute.
    """
    tip = raw_revision()['last_block']
    ckey = "data:tx:pendinglist:" + str(tip)
    if addr is not None:
        ckey += ":" + str(addr)
    try:
        response = json.loads(lGet(ckey))
        print_debug(("cache looked success", ckey), 7)
    except:
        print_debug(("cache looked failed", ckey), 7)
        # pending txs live at negative serial numbers; cap at 25
        if addr is not None:
            ROWS = dbSelect(
                "select txj.txdata, extract(epoch from tx.txrecvtime) from txjson txj,transactions tx,addressesintxs atx where tx.txdbserialnum=txj.txdbserialnum and atx.txdbserialnum=txj.txdbserialnum and atx.address=%s and txj.protocol = 'Omni' and txj.txdbserialnum < 0 order by txj.txdbserialnum ASC limit 25;",
                [addr])
        else:
            ROWS = dbSelect(
                "select txj.txdata, extract(epoch from tx.txrecvtime) from txjson txj,transactions tx where tx.txdbserialnum=txj.txdbserialnum and txj.protocol = 'Omni' and txj.txdbserialnum < 0 order by txj.txdbserialnum ASC limit 25;"
            )
        names = getpropnamelist()
        txs = []
        by_addr = {}
        for row in ROWS:
            txinfo = addName(row[0], names)
            if 'blocktime' not in txinfo:
                try:
                    # unconfirmed: substitute the wallet receive time
                    txinfo['blocktime'] = int(row[1])
                except:
                    pass
            txs.append(txinfo)
            # index under the sender
            by_addr.setdefault(txinfo['sendingaddress'], []).append(txinfo)
            # index under the receiver when the tx type has one
            try:
                by_addr.setdefault(txinfo['referenceaddress'], []).append(txinfo)
            except:
                pass
        response = {'data': txs, 'index': by_addr}
        # keep the pending snapshot for one minute
        lSet(ckey, json.dumps(response))
        lExpire(ckey, 60)
    return response
def gettxjson(hash_id):
    # Look up a single Omni transaction by txid and return its JSON, with
    # property names added and confirmations recomputed against the current
    # chain tip.  Cached per txid: 5 minutes while unconfirmed, 4 weeks once
    # confirmed, 100 seconds if the confirmation state cannot be read.
    try:
        # strip any extension and all non-alphanumerics from the input
        transaction_ = str(re.sub(r'\W+', '', hash_id.split('.')[0]))  #check alphanumeric
    except ValueError:
        return {
            'error': 'This endpoint only consumes valid input. Invalid txid'
        }
    rev = raw_revision()
    cblock = rev['last_block']
    ckey = "data:tx:" + str(transaction_)
    try:
        txJson = json.loads(lGet(ckey))
        print_debug(("cache looked success", ckey), 7)
    except:
        print_debug(("cache looked failed", ckey), 7)
        if len(transaction_) == 64:
            #ROWS=dbSelect("select txj.txdata, extract(epoch from t.txrecvtime) from transactions t, txjson txj where t.txdbserialnum = txj.txdbserialnum and t.protocol != 'Bitcoin' and t.txhash=%s", [transaction_])
            ROWS = dbSelect(
                "select txdata,txdbserialnum from txjson where txdata->>'txid'=%s",
                [transaction_])
        else:
            # not a 64-char txid: skip the query and fall through to Not Found
            ROWS = []
        if len(ROWS) < 1:
            txJson = {'txid': transaction_, 'type': 'Error - Not Found'}
        else:
            try:
                txJ = json.loads(ROWS[0][0])
            except TypeError:
                # txdata was already decoded to a dict by the DB driver
                txJ = ROWS[0][0]
            try:
                # backfill type_int for DEx purchases that predate the field
                if 'type_int' not in txJ and txJ['type'] == "DEx Purchase":
                    txJ['type_int'] = -22
            except:
                pass
            txJson = addName(txJ, getpropnamelist())
            if 'blocktime' not in txJson:
                try:
                    # fall back to the wallet receive time when blocktime is absent
                    txdbserial = ROWS[0][1]
                    blk_time = dbSelect(
                        "select extract(epoch from txrecvtime) from transactions where txdbserialnum = %s",
                        [txdbserial])
                    txJson['blocktime'] = int(blk_time[0][0])
                except:
                    pass
        lSet(ckey, json.dumps(txJson))
        try:
            #check if tx is unconfirmed and expire cache after 5 min if it is otherwise 4 weeks
            if txJson['confirmations'] == 0:
                lExpire(ckey, 300)
            else:
                lExpire(ckey, 2419200)
        except:
            lExpire(ckey, 100)
    try:
        # repeat the type_int backfill for entries served from the cache
        if 'type_int' not in txJson and txJson['type'] == "DEx Purchase":
            txJson['type_int'] = -22
    except:
        pass
    try:
        txJson['confirmations'] = cblock - txJson['block'] + 1
    except:
        pass
    try:
        #if cblock hasn't caught up make sure we don't return negative weirdness
        if txJson['confirmations'] < 0:
            txJson['confirmations'] = 0
    except:
        pass
    return txJson
def getrecenttxpages(page=1):
    """Return one page (10 entries) of the most recent Omni transactions,
    with pending (unconfirmed) transactions spliced in ahead of confirmed
    ones.  Built pages are cached per chain-tip block for 5 minutes.
    """
    #pagination starts at 1 so adjust accordingly to treat page 0 and 1 the same
    try:
        page = int(page)
    except:
        page = 1
    page -= 1
    if page < 0:
        page = 0
    try:
        offset = int(page) * 10
    except:
        offset = 0
        page = 0
    # map a requested tx_type onto the set of related type_int values
    filters = {
        0: [0],
        3: [3],
        20: [20, 22, -21],
        25: [25, 26, 27, 28],
        50: [50, 51, 54],
        55: [55],
        56: [56]
    }
    try:
        # renamed from 'filter' to avoid shadowing the builtin
        txfilter = int(request.form['tx_type'])
        tx_type = filters[txfilter]
    except:
        txfilter = 9999
        tx_type = None
    rev = raw_revision()
    cblock = rev['last_block']
    toadd = []
    limit = 10
    #ckey="data:tx:general:"+str(cblock)+":"+str(filter)+":"+str(page)
    ckey = "data:tx:general:" + str(cblock) + ":" + str(page)
    try:
        response = json.loads(lGet(ckey))
        print_debug(("cache looked success", ckey), 7)
    except:
        print_debug(("cache looked failed", ckey), 7)
        raw = getrawpending()
        try:
            # splice pending txs into this page and shrink the DB window
            pending = raw['data']
            count = len(pending)
            if count > 0:
                # renamed from 'max' to avoid shadowing the builtin
                maxidx = offset + 10
                if maxidx > count:
                    maxidx = count
                for x in range(offset, maxidx):
                    toadd.append(pending[x])
                limit -= len(toadd)
                offset -= count
                if offset < 0:
                    offset = 0
        except Exception as e:
            print_debug(("getgeneral pending inject failed", e), 2)
            pass
        #if filter==9999:
        ROWS = dbSelect(
            "select txdata from txjson txj where protocol = 'Omni' and txdbserialnum > 0 order by txdbserialnum DESC offset %s limit %s;",
            (offset, limit))
        #else:
        #  ROWS=dbSelect("select txdata from txjson where cast(txdata->>'type_int' as numeric) = ANY(%s) and "
        #                "protocol = 'Omni' and txdbserialnum > 0 order by txdbserialnum DESC offset %s limit %s;",(tx_type,offset,limit))
        data = []
        pnl = getpropnamelist()
        if len(ROWS) > 0:
            for d in ROWS:
                # (fix: removed stray Python-2 debug `print d[0], type(d[0])`)
                res = addName(d[0], pnl)
                try:
                    res['confirmations'] = cblock - res['block'] + 1
                except:
                    pass
                #if cblock hasn't caught up make sure we don't return negative weirdness
                # (fix: guard the key -- it is absent when the assignment above failed)
                if 'confirmations' in res and res['confirmations'] < 0:
                    res['confirmations'] = 0
                data.append(res)
        pages = getpagecounttxjson()
        data = toadd + data
        response = {'pages': pages, 'transactions': data}
        #cache pages for 5 min
        lSet(ckey, json.dumps(response))
        lExpire(ckey, 300)
        cachetxs(data)
    return jsonify(response)
def getaddresshistraw(address, page):
    # Paginated history for an address (10 per page, newest first), with the
    # address's pending transactions injected ahead of confirmed ones.
    # Page contents cached for 7 minutes.
    rev = raw_revision()
    cblock = rev['last_block']
    try:
        page = int(page)
    except:
        page = 1
    # NOTE(review): elsewhere in this file getaddresstxcount(address) returns
    # a dict with 'pages'/'txcount', yet here its raw return value is compared
    # against an int and returned as 'pages' -- confirm which revision of that
    # helper this function expects.
    pcount = getaddresstxcount(address)
    # convert the 1-based page number to a 0-based index, clamped to range
    adjpage = page
    adjpage -= 1
    if adjpage < 0:
        adjpage = 0
    if adjpage > pcount:
        adjpage = pcount
    toadd = []
    limit = 10
    offset = adjpage * 10
    ckey = "data:addrhist:" + str(address) + ":" + str(adjpage)
    try:
        #check cache
        txlist = json.loads(lGet(ckey))
        print_debug(("cache looked success", ckey), 7)
    except:
        print_debug(("cache looked failed", ckey), 7)
        raw = getrawpending()
        try:
            # inject this address's pending txs and shrink the query window
            if address in raw['index']:
                pending = raw['index'][address]
                count = len(pending)
                if count > 0:
                    max = offset + 10
                    if max > count:
                        max = count
                    for x in range(offset, max):
                        toadd.append(pending[x])
                    limit -= len(toadd)
                    offset -= count
                    if offset < 0:
                        offset = 0
                    if limit < 0:
                        limit = 0
        except Exception as e:
            print_debug(("getaddresshistraw pending inject failed", e), 2)
            pass
        ROWS = []
        if limit > 0:
            #ROWS=dbSelect("select txj.txdata from txjson txj, (select distinct txdbserialnum from addressesintxs where address=%s and txdbserialnum > 0) q where q.txdbserialnum=txj.txdbserialnum order by txj.txdbserialnum desc limit %s offset %s",(address,limit,offset))
            #ROWS=dbSelect("select txdata from txjson where (txdata->>'sendingaddress'=%s or txdata->>'referenceaddress'=%s) and txdbserialnum > 0 order by txdbserialnum desc limit %s offset %s",(address,address,limit,offset))
            ROWS = dbSelect(
                "with temp as (select distinct(txdbserialnum) as txdbserialnum from addressesintxs where address=%s and txdbserialnum > 0 order by txdbserialnum desc limit %s offset %s) select txj.txdata from txjson txj, temp where txj.txdbserialnum=temp.txdbserialnum",
                (address, limit, offset))
        #set and cache data for 7 min
        pnl = getpropnamelist()
        txlist = []
        for r in ROWS:
            txJson = addName(r[0], pnl)
            txlist.append(txJson)
        txlist = toadd + txlist
        lSet(ckey, json.dumps(txlist))
        lExpire(ckey, 420)
    try:
        # refresh confirmation counts against the current tip (cached too)
        for tx in txlist:
            tx['confirmations'] = cblock - tx['block'] + 1
    except:
        pass
    cachetxs(txlist)
    response = {
        'address': address,
        'transactions': txlist,
        'pages': pcount,
        'current_page': page
    }
    return response
def getrecenttxpages(page=1):
    """Serve one page (10 entries) of the most recent Omni transactions.

    Pending (unconfirmed) transactions are spliced in ahead of confirmed
    rows; built pages are cached per chain-tip block for 5 minutes.
    """
    #pagination starts at 1 so adjust accordingly to treat page 0 and 1 the same
    try:
        page = int(page)
    except:
        page = 1
    page = max(page - 1, 0)
    # page is guaranteed an int here, so the offset is a plain product
    offset = page * 10
    tip = raw_revision()['last_block']
    injected = []
    limit = 10
    ckey = "data:tx:general:" + str(tip) + ":" + str(page)
    try:
        response = json.loads(lGet(ckey))
        print_debug(("cache looked success", ckey), 7)
    except:
        print_debug(("cache looked failed", ckey), 7)
        raw = getrawpending()
        try:
            # splice this page's slice of pending txs in and shrink the
            # confirmed-tx query window accordingly
            pending = raw['data']
            pending_count = len(pending)
            if pending_count > 0:
                upper = min(offset + 10, pending_count)
                for i in range(offset, upper):
                    injected.append(pending[i])
                limit -= len(injected)
                offset = max(offset - pending_count, 0)
        except Exception as e:
            print_debug(("getgeneral pending inject failed", e), 2)
        ROWS = dbSelect(
            "select txdata from txjson txj where protocol = 'Omni' and txdbserialnum > 0 order by txdbserialnum DESC offset %s limit %s;",
            (offset, limit))
        names = getpropnamelist()
        confirmed = []
        for row in ROWS:
            entry = addName(row[0], names)
            try:
                entry['confirmations'] = tip - entry['block'] + 1
            except:
                pass
            # clamp negative confirmations when the tip lags behind
            if entry['confirmations'] < 0:
                entry['confirmations'] = 0
            confirmed.append(entry)
        data = injected + confirmed
        response = {'pages': getpagecounttxjson(), 'transactions': data}
        # keep the built page around for 5 minutes
        lSet(ckey, json.dumps(response))
        lExpire(ckey, 300)
        cachetxs(data)
    return jsonify(response)