def main():
    global payments
    global myForgedBlocks
    global myLeases
    global myCanceledLeases

    print('getting blocks...')
    blocks = getAllBlocks()
    print('preparing datastructures...')
    blocks = prepareDataStructure(blocks)

    # clear already paid tx: before the start block, keep only
    # lease (type 8) and lease-cancel (type 9) transactions
    for block in blocks:
        txs = []
        if block['height'] < config['startBlock']:
            for tx in block['transactions']:
                if tx['type'] == 8 or tx['type'] == 9:
                    txs.append(tx)
        else:
            txs = block['transactions']
        block['transactions'] = txs

    # save current blocks
    print('saving blockfile...')
    with open(config['blockStorage'], 'w') as outfile:
        hyperjson.dump(blocks, outfile)

    print('preparing payments...')
    if config['endBlock'] == 0:
        endBlock = requests.get(config['node'] + '/blocks/height').json()['height'] - 1
    else:
        endBlock = config['endBlock']

    for block in myForgedBlocks:
        if block['height'] >= config['startBlock'] and block['height'] <= endBlock:
            blockLeaseData = getActiveLeasesAtBlock(block)
            activeLeasesForBlock = blockLeaseData['activeLeases']
            amountTotalLeased = blockLeaseData['totalLeased']

            distribute(activeLeasesForBlock, amountTotalLeased, block)

    # remove excluded addresses
    for exclude in config['excludeListTN']:
        payments[exclude] = 0
        print('excluding: ' + exclude)

    total = checkTotalDistributableAmount(payments)
    createPayment()

    print('forged blocks: ' + str(len(myForgedBlocks)))
    print('number of payments: ' + str(len(payments)))
    print('total payment: ' + str(total / pow(10, 8)))
    print('number of leases: ' + str(len(myLeases)))
    print('number of cancelled leases: ' + str(len(myCanceledLeases)))
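# Hypothetical sketch only: distribute() is called in main() above but is not
# shown in this section. Assumed behaviour: each active lessor is credited a
# share of the block's earnings proportional to the amount leased. The 'fee'
# field, the 'sharePercentage' config key, and the dict shape of activeLeases
# (address -> leased amount) are assumptions, not taken from the source.
def distribute(activeLeases, amountTotalLeased, block):
    global payments

    blockEarnings = block.get('fee', 0)  # assumed: earnings attributed to this forged block
    for address, leasedAmount in activeLeases.items():
        share = leasedAmount / amountTotalLeased
        payout = blockEarnings * share * config['sharePercentage']
        payments[address] = payments.get(address, 0) + payout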
def test_dumpToFileLikeObject(self):
    class filelike:
        def __init__(self):
            self.bytes = ''

        def write(self, bytes):
            self.bytes += bytes

    f = filelike()
    hyperjson.dump([1, 2, 3], f)
    self.assertEqual("[1, 2, 3]", f.bytes)
def createPayment():
    global payments

    tx = []
    for address in payments:
        if round(payments[address]) > config['minAmounttoPay']:
            paytx = {'recipient': address, 'amount': round(payments[address])}
            tx.append(paytx)

    with open(config['paymentStorage'], 'w') as outfile:
        hyperjson.dump(tx, outfile)

    print('payments written to ' + config['paymentStorage'])
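# Illustrative only: the addresses, amounts, and config values below are made
# up to show the rounding and dust-threshold behaviour of createPayment().
# Note the comparison is strictly greater-than, so a rounded amount equal to
# minAmounttoPay is skipped.
config = {'minAmounttoPay': 100000, 'paymentStorage': 'payments.json'}
payments = {
    'addr_A': 2500000.4,  # rounds to 2500000 > 100000 -> included
    'addr_B': 99999.9,    # rounds to 100000, not strictly greater -> skipped
}
createPayment()  # writes [{"recipient": "addr_A", "amount": 2500000}]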
def test_dumpToFile(self):
    f = six.StringIO()
    hyperjson.dump([1, 2, 3], f)
    self.assertEqual("[1, 2, 3]", f.getvalue())
def test_dump():
    sio = StringIO()
    hyperjson.dump(['streaming API'], sio)
    assert sio.getvalue() == '["streaming API"]'
def test_dump_invalid_writer():
    with pytest.raises(AttributeError):
        json.dump([], '')
    with pytest.raises(AttributeError):
        hyperjson.dump([], '')
def save_json(savej, outp_path):
    outp_dir = os.path.dirname(outp_path)
    if not os.path.isdir(outp_dir) and outp_dir != '':
        os.makedirs(outp_dir)
    with open(outp_path, 'w', newline='\n') as ofile:
        json.dump(savej, ofile)  # hyperjson is missing ensure_ascii=True
    'division_lower', 'division_original_lower', 'location_id', 'location',
    'location_original', 'location_lower', 'location_original_lower',
    'pangolin_lineage', 'pangolin_version', 'clade',
]
muts_info = [
    'type', 'mutation', 'gene', 'ref_codon', 'pos', 'alt_codon',
    'is_synonymous', 'ref_aa', 'codon_num', 'alt_aa', 'absolute_coords',
    'change_length_nt', 'is_frameshift', 'deletion_codon_coords'
]

print(f"Generating JSON...")
start = time.time()
json_data = (muts.groupby(meta_info, as_index=True)
             .apply(lambda x: x[muts_info].to_dict('records'))
             .reset_index()
             .rename(columns={0: 'mutations'})
             .to_json(orient='records'))
json_time = time.time() - start

print(f"Writing JSON to {out_fp}")
start = time.time()
with open(out_fp, 'w') as f:
    hyperjson.dump(json_data, f)
io_time = time.time() - start

print(f"JSON Execution time: {json_time} seconds")
print(f"IO Execution time: {io_time} seconds")
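# Note (sketch, not from the source): DataFrame.to_json(orient='records')
# already returns a JSON string, so hyperjson.dump(json_data, f) encodes it a
# second time and out_fp ends up holding one quoted, escaped string. If a
# plain JSON array is intended, writing the string directly avoids that:
with open(out_fp, 'w') as f:
    f.write(json_data)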
def save_json(savej, outp_path):
    with open(outp_path, 'w', newline='\n') as ofile:
        json.dump(savej, ofile)  # hyperjson is missing ensure_ascii=True
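# Sketch, not from the source: both save_json variants keep the stdlib encoder
# only because hyperjson.dump has no ensure_ascii option. A combined helper
# (the name save_json_fast and the ascii_only flag are invented here) could
# fall back to the stdlib only when ASCII-escaped output is actually required.
def save_json_fast(savej, outp_path, ascii_only=False):
    with open(outp_path, 'w', newline='\n') as ofile:
        if ascii_only:
            json.dump(savej, ofile)       # stdlib escapes non-ASCII by default
        else:
            hyperjson.dump(savej, ofile)  # presumably leaves non-ASCII unescaped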