def __init__(self, blocks=None, partial=False):
    """Build an in-memory chain from an ordered iterable of blocks.

    :param blocks: iterable of Block instances or block dicts, ordered by
        index; None is treated as an empty chain.
    :param partial: if True, the chain may start at an index other than 0.
    :raises Exception: on a gap or unordered sequence, or when a full
        (non-partial) chain does not start at index 0.
    """
    self.config = get_config()
    self.mongo = self.config.mongo
    self.blocks = []
    last_index = None
    # Fix: iterate over an empty list when blocks is None — the original
    # crashed with a TypeError on the default argument.
    for block in blocks or []:
        if not isinstance(block, Block):
            block = Block.from_dict(block)
        # Fix: compare against None explicitly. The original truthiness
        # test (`if last_index and ...`) silently skipped the ordering
        # check for the block right after genesis, because index 0 is falsy.
        if last_index is not None and (block.index - last_index) != 1:
            # In that case: (most often, dup buried block), check if block n+1 exists then remove the wrong block(s)
            # see when inserting/replacing block how the dup insert occurs.
            raise Exception(
                'Either incomplete blockchain or unordered. block {} vs last {}'
                .format(block.index, last_index))
        self.blocks.append(block)
        last_index = block.index
    self.partial = partial
    if not self.blocks:
        return  # allow nothing
    if self.blocks and self.blocks[0].index != 0 and not self.partial:
        raise Exception(
            'Blocks do not start with zero index. Either incomplete blockchain or unordered.'
        )
def save_my_peer(cls, network):
    """Persist our own peer address in mongo and announce it to the
    yadacoin.io registry.

    Skipped entirely on regnet, when posting is disabled via config, or
    for debug nodes (which should not be advertised).

    :param network: 'mainnet' or 'testnet'; selects the registry URL.
    """
    config = get_config()
    peer = config.peer_host + ":" + str(config.peer_port)
    # Upsert our single self-peer record.
    config.mongo.db.config.update({'mypeer': {
        "$ne": ""
    }}, {
        'mypeer': peer,
        'network': config.network
    }, upsert=True)
    if config.network == 'regnet':
        return
    if not config.post_peer:
        return
    if config.debug:
        return  # Do not report debug nodes
    url = 'https://yadacoin.io/peers'  # default url (mainnet)
    if network == 'testnet':
        url = 'https://yadacoin.io:444/peers'
    try:
        requests.post(url,
                      json.dumps({
                          'host': config.peer_host,
                          'port': config.peer_port,
                          'bulletin_secret': config.get_bulletin_secret()
                      }),
                      headers={"Content-Type": "application/json"})
    except Exception:
        # Fix: narrowed the bare `except:` (it also swallowed
        # KeyboardInterrupt/SystemExit) and corrected the message — this
        # branch fails while *posting* our peer, not while getting peers.
        print('ERROR: failed to post our peer, exiting...')
        exit()
def ws_init():
    """Create the global socket.io AsyncServer and register its namespaces."""
    global SIO
    # see https://github.com/miguelgrinberg/python-socketio/blob/master/examples/server/tornado/app.py
    server = AsyncServer(async_mode='tornado', logger=False, engineio_logger=None)
    # See https://python-socketio.readthedocs.io/en/latest/server.html#namespaces
    server.register_namespace(ChatNamespace('/chat'))
    if get_config().max_miners > 0:
        # Only register pool namespace if we want to run a pool
        server.register_namespace(PoolNamespace('/pool'))
    SIO = server
def __init__(self, block_height, time='', rid='', transaction_signature='',
             relationship='', public_key='', dh_public_key='', fee=0.0,
             requester_rid='', requested_rid='', txn_hash='', inputs='',
             outputs='', coinbase=False, signatures=None, extra_blocks=None,
             raw=False):
    """Build a fastgraph transaction from raw field values.

    Output and input dicts are normalized into typed objects; signatures
    may be supplied either as FastGraphSignature instances or as raw
    values, which get wrapped.
    """
    self.config = get_config()
    self.mongo = self.config.mongo
    self.app_log = getLogger('tornado.application')
    self.block_height = block_height
    self.time = time
    self.rid = rid
    self.transaction_signature = transaction_signature
    self.relationship = relationship
    self.public_key = public_key
    self.dh_public_key = dh_public_key
    self.fee = fee
    self.requester_rid = requester_rid
    self.requested_rid = requested_rid
    self.hash = txn_hash
    self.extra_blocks = extra_blocks
    self.raw = raw
    self.coinbase = coinbase
    self.outputs = [Output.from_dict(entry) for entry in outputs]
    # An input carrying a full signature/public_key/address triple is external.
    self.inputs = [
        ExternalInput.from_dict(entry)
        if 'signature' in entry and 'public_key' in entry and 'address' in entry
        else Input.from_dict(entry)
        for entry in inputs
    ]
    # Wrap any raw signature values so the list is homogeneous.
    self.signatures = [
        sig if isinstance(sig, FastGraphSignature) else FastGraphSignature(sig)
        for sig in (signatures or [])
    ]
def __init__(self, peer):
    """Wrap an outgoing socket.io client for the given peer."""
    self.peer = peer
    self.config = get_config()
    self.consensus = self.config.consensus
    self.peers = self.config.peers
    self.app_log = getLogger("tornado.application")
    # no auto-reconnect: a failed connection is handled by the caller
    self.client = AsyncClient(reconnection=False, logger=False)
    self.latest_peer_block = None  # last block this peer told us about
    self.connected = False
    self.probable_old = False
def __init__(self):
    """Track inbound/outbound peer streams and load our own peer record."""
    self.config = get_config()
    self.mongo = self.config.mongo
    self.network = self.config.network
    self.app_log = getLogger("tornado.application")
    self.my_peer = None
    self.inbound = {}  # a dict of inbound streams, keys are sids
    self.outbound = {}  # a dict of outbound streams, keys are ips
    self.connected_ips = []  # a list of peers ip we're connected to
    # I chose to have 2 indexs and more memory footprint rather than
    # iterating over one to get the other.
    self.probable_old_nodes = {}  # keys are ip, value time when to delete from list
    self.syncing = False
    record = self.mongo.db.config.find_one({'mypeer': {"$ne": ""}})
    if record:
        self.my_peer = record.get('mypeer')  # str
        self.app_log.debug(self.my_peer)
def __init__(self,
             version=0,
             block_time=0,
             block_index=-1,
             prev_hash='',
             nonce: str = '',
             transactions=None,
             block_hash='',
             merkle_root='',
             public_key='',
             signature='',
             special_min: bool = False,
             header='',
             target: int = 0,
             special_target: int = 0):
    """Build a block from its raw fields.

    When no target is supplied (target == 0), one is derived from the
    current chain tip via BlockFactory.get_target().
    """
    self.app_log = getLogger('tornado.application')
    self.config = get_config()
    self.mongo = self.config.mongo
    self.version = version
    self.time = block_time
    self.index = block_index
    self.prev_hash = prev_hash
    self.nonce = nonce
    self.transactions = transactions
    self.merkle_root = merkle_root
    self.verify_merkle_root = ''
    self.hash = block_hash
    self.public_key = public_key
    self.signature = signature
    self.special_min = special_min
    self.target = target
    self.special_target = special_target
    if target == 0:
        # Same call as in new block check - but there's a circular reference here.
        tip = self.config.BU.get_latest_block()
        if tip:
            self.target = BlockFactory.get_target(
                self.index, Block.from_dict(tip), self,
                self.config.consensus.existing_blockchain)
        else:
            # Empty chain: fall back to the easiest possible target.
            self.target = CHAIN.MAX_TARGET
        # TODO: do we need recalc special target here if special min?
        self.special_target = self.target
    self.header = header
def __init__(self, debug=False, peers=None, prevent_genesis=False):
    """Set up consensus state: peer list, the latest known block (inserting
    genesis when allowed and the chain is empty) and the in-memory chain."""
    self.app_log = logging.getLogger("tornado.application")
    self.debug = debug
    self.config = get_config()
    self.mongo = self.config.mongo
    self.prevent_genesis = prevent_genesis
    self.peers = peers if peers else Peers()
    tip = self.config.BU.get_latest_block()
    if tip:
        self.latest_block = Block.from_dict(tip)
    elif not self.prevent_genesis:
        # Empty chain: bootstrap it with the genesis block.
        self.insert_genesis()
    self.existing_blockchain = Blockchain(self.config.BU.get_blocks())
async def on_connect(self):
    """Client-side /chat connect handler: greet the peer, then ask for its
    active peer list."""
    self.app_log = getLogger("tornado.application")
    print("CONNECT WS")
    self.config = get_config()
    self.mongo = self.config.mongo
    # connection_url looks like scheme://ip:port — keep only the endpoint
    endpoint = self.client.connection_url.split('//')[1]
    self.ip, self.port = endpoint.split(':')
    self.app_log.debug('ws client /Chat connected to {}:{} - {}'.format(
        self.ip, self.port, self.client))
    self.client.manager.connected = True
    await self.emit('hello',
                    data={
                        "version": 2,
                        "ip": self.config.public_ip,
                        "port": self.config.peer_port
                    },
                    namespace="/chat")
    # ask the peer active list
    await self.emit('get_peers', data={}, namespace="/chat")
def __init__(self, host, port, bulletin_secret=None, is_me=False,
             stream=None, inbound=False, sid=None):
    """Represent a single peer endpoint (host:port) and its connection state."""
    self.config = get_config()
    self.mongo = self.config.mongo
    self.app_log = getLogger("tornado.application")
    self.host = host
    self.port = port
    self.bulletin_secret = bulletin_secret
    self.is_me = is_me
    self.client = None
    self.stream = stream  # for async http
    # Is this an inbound connection? If it is, we can't rely on the port.
    self.inbound = inbound
    self.sid = sid  # This is the websocket session id
def __init__(
    self,
    block_height,
    txn_time='',
    rid='',
    transaction_signature='',
    relationship='',
    public_key='',
    dh_public_key='',
    fee=0.0,
    requester_rid='',
    requested_rid='',
    txn_hash='',
    inputs='',
    outputs='',
    coinbase=False,
    extra_blocks=None
):
    """Build a transaction wrapper from raw field values.

    Output/input dicts are normalized into typed objects; inputs carrying
    a full signature/public_key/address triple become external inputs.
    """
    self.config = get_config()
    self.mongo = self.config.mongo
    self.block_height = block_height
    self.time = txn_time
    self.rid = rid
    self.transaction_signature = transaction_signature
    self.relationship = relationship
    self.public_key = public_key
    self.dh_public_key = dh_public_key if dh_public_key else ''
    self.fee = float(fee)
    self.requester_rid = requester_rid if requester_rid else ''
    self.requested_rid = requested_rid if requested_rid else ''
    self.hash = txn_hash
    self.extra_blocks = extra_blocks
    self.coinbase = coinbase
    self.outputs = [Output.from_dict(entry) for entry in outputs]
    self.inputs = [
        ExternalInput.from_dict(entry)
        if 'signature' in entry and 'public_key' in entry and 'address' in entry
        else Input.from_dict(entry)
        for entry in inputs
    ]
async def on_connect(self, sid, environ):
    """Handle a new inbound /chat websocket; return False to reject/close."""
    if 'config' not in self.__dict__:
        # ChatNamespace is a singleton, same instance for everyone:
        # lazily bind shared state once, at the first request.
        self.config = get_config()
        self.mongo = self.config.mongo
        self.app_log = getLogger("tornado.application")
        self.peers = self.config.peers
        self.consensus = self.config.consensus
    client_ip = environ['tornado.handler'].request.remote_ip
    if self.peers.free_inbound_slots <= 0:
        self.app_log.warning(
            'No free slot, client rejected: {}'.format(client_ip))
        return False  # This will close the socket
    if not self.peers.allow_ip(client_ip):
        self.app_log.info('Client rejected: {}'.format(client_ip))
        return False  # This will close the socket
    # Store the ip to avoid duplicate connections
    self.config.peers.on_new_ip(client_ip)
    await self.save_session(sid, {'ip': client_ip})
    if self.config.debug:
        self.app_log.info('Client connected: {}'.format(sid))
async def on_connect(self, sid, environ):
    """Handle a new miner connection on /pool; return False to reject/close."""
    if 'config' not in self.__dict__:
        # PoolNamespace is a singleton, same instance for everyone:
        # lazily bind shared state once, at the first request.
        self.config = get_config()
        self.app_log = getLogger("tornado.application")
        self.mp = MiningPool()
        await self.mp.refresh()  # This will create the block factory
        self.config.mp = self.mp
    miner_ip = environ['REMOTE_ADDR']
    if self.mp.free_inbound_slots <= 0:
        self.app_log.warning('No free slot, Miner rejected: {}'.format(miner_ip))
        return False  # This will close the socket
    # IP-based filtering is currently disabled:
    #   if not self.mp.allow_ip(IP):
    #       self.app_log.info('Miner rejected: {}'.format(IP))
    #       return False  # This will close the socket
    # TODO: we could also limit the sid per IP
    self.mp.on_new_ip(miner_ip)  # Store the ip to avoid duplicate connections
    await self.save_session(sid, {'IP': miner_ip})
    if self.config.debug:
        self.app_log.info('Miner connected: {} {}'.format(miner_ip, sid))
def init_my_peer(cls, network):
    """Determine our public peer address (via UPnP port mapping when
    config.use_pnp is set), persist it, and return a peer for ourselves.

    :param network: network name, forwarded to save_my_peer().
    :return: an instance of cls for (config.peer_host, config.peer_port).
    """
    config = get_config()
    if config.use_pnp:
        import socket
        from miniupnpc import UPnP
        # deploy as an eventlet WSGI server
        try:
            # Ask the OS for a free local port by binding to port 0.
            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            try:
                sock.bind((config.serve_host, 0))
                server_port = sock.getsockname()[1]
            finally:
                # Fix: close the probe socket even when bind() raises;
                # the original leaked it on error.
                sock.close()
            eport = server_port
            u = UPnP(None, None, 200, 0)
            u.discover()
            u.selectigd()
            # Probe upward until we find an external port with no mapping.
            r = u.getspecificportmapping(eport, 'TCP')
            while r is not None and eport < 65536:
                eport = eport + 1
                r = u.getspecificportmapping(eport, 'TCP')
            b = u.addportmapping(eport, 'TCP', u.lanaddr, server_port,
                                 'UPnP YadaCoin Serve port %u' % eport, '')
            # NOTE(review): the mapping uses external port `eport`, but
            # peer_port is advertised as `server_port` — looks inconsistent
            # when eport != server_port; confirm against the router mapping.
            config.serve_host = '0.0.0.0'
            config.serve_port = server_port
            config.peer_host = u.externalipaddress()
            config.peer_port = server_port
        except Exception as e:
            print(e)
            # Fix: removed the original's four no-op self-assignments
            # (e.g. config.serve_host = config.serve_host); on failure we
            # simply keep the configured values.
            print('UPnP failed: you must forward and/or whitelist port',
                  config.peer_port)
    cls.save_my_peer(network)
    return cls(config.peer_host, config.peer_port)
def __init__(self):
    """Bind the shared config, its mongo handle and the tornado app logger."""
    self.app_log = getLogger('tornado.application')
    self.config = get_config()
    self.mongo = self.config.mongo
async def integrate_block_with_existing_chain(self, block: Block, extra_blocks=None):
    """Even in case of retrace, this is the only place where we insert a new
    block into the block collection and update BU.

    Verifies the block and each of its transactions, checks linkage to the
    previous in-memory block, validates proof-of-work against the computed
    target (or special-min target), then persists the block and notifies
    via config.on_new_block(). Returns True on success, False on a
    verification failure; raises ForkException / AboveTargetException on
    chain-level mismatches.
    """
    try:
        # TODO: reorg the checks, to have the faster ones first.
        # Like, here we begin with checking every tx one by one, when <e did not even check index and provided hash matched previous one.
        try:
            block.verify()
        except Exception as e:
            print("Integrate block error 1", e)
            return False
        # Verify every transaction; any failure rejects the whole block.
        for transaction in block.transactions:
            try:
                if extra_blocks:
                    transaction.extra_blocks = extra_blocks
                transaction.verify()
            except InvalidTransactionException as e:
                print(e)
                return False
            except InvalidTransactionSignatureException as e:
                print(e)
                return False
            except MissingInputTransactionException as e:
                print(e)
                return False
            except NotEnoughMoneyException as e:
                print(e)
                return False
            except Exception as e:
                print(e)
                return False
        # Genesis block: nothing to link against.
        if block.index == 0:
            return True
        height = block.index
        last_block = self.existing_blockchain.blocks[block.index - 1]
        if last_block.index != (block.index - 1) or last_block.hash != block.prev_hash:
            print("Integrate block error 2")
            raise ForkException()
        # NOTE(review): this null check is dead — last_block was already
        # dereferenced above, so a missing block would have raised earlier.
        if not last_block:
            print("Integrate block error 3")
            raise ForkException()
        target = BlockFactory.get_target(height, last_block, block,
                                         self.existing_blockchain)
        # Seconds elapsed since the previous block (wall clock vs block time).
        delta_t = int(time()) - int(last_block.time)
        special_target = CHAIN.special_target(block.index, block.target,
                                              delta_t, get_config().network)
        target_block_time = CHAIN.target_block_time(self.config.network)
        if block.index >= 35200 and delta_t < 600 and block.special_min:
            raise Exception('Special min block too soon')
        # TODO: use a CHAIN constant for pow blocks limits
        # Accept when the hash beats the target, or the special-min rules
        # for the relevant fork heights apply.
        if ((int(block.hash, 16) < target) or
                (block.special_min and int(block.hash, 16) < special_target) or
                (block.special_min and block.index < 35200) or
                (block.index >= 35200 and block.index < 38600 and
                 block.special_min and
                 (int(block.time) - int(last_block.time)) > target_block_time)):
            if last_block.index == (
                    block.index - 1) and last_block.hash == block.prev_hash:
                # self.mongo.db.blocks.update({'index': block.index}, block.to_dict(), upsert=True)
                # self.mongo.db.blocks.remove({'index': {"$gt": block.index}}, multi=True)
                # todo: is this useful? can we have more blocks above? No because if we had, we would have raised just above
                # NOTE(review): delete_many targets collection `block`
                # (singular) while replace_one targets `blocks` — looks
                # like a typo; confirm which collection is intended.
                await self.mongo.async_db.block.delete_many(
                    {'index': {
                        "$gte": block.index
                    }})
                await self.mongo.async_db.blocks.replace_one(
                    {'index': block.index}, block.to_dict(), upsert=True)
                # TODO: why do we need to keep that one in memory?
                # Keep the in-memory chain in sync: overwrite at this
                # height and drop anything above; append when the slot
                # does not exist yet.
                try:
                    self.existing_blockchain.blocks[block.index] = block
                    del self.existing_blockchain.blocks[block.index + 1:]
                except:
                    self.existing_blockchain.blocks.append(block)
                if self.debug:
                    self.app_log.info(
                        "New block inserted for height: {}".format(
                            block.index))
                await self.config.on_new_block(
                    block)  # This will propagate to BU
                return True
            else:
                print("Integrate block error 4")
                raise ForkException()
        else:
            print("Integrate block error 5")
            raise AboveTargetException()
        return False  # unreachable code
    except Exception as e:
        # Log where the failure happened, then re-raise for the caller.
        exc_type, exc_obj, exc_tb = exc_info()
        fname = path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        self.app_log.warning(
            "integrate_block_with_existing_chain {} {} {}".format(
                exc_type, fname, exc_tb.tb_lineno))
        raise
def __init__(
    self,
    block_height,
    bulletin_secret='',
    username='',
    value=0,
    fee=0.0,
    requester_rid='',
    requested_rid='',
    public_key='',
    dh_public_key='',
    private_key='',
    dh_private_key='',
    to='',
    inputs='',
    outputs='',
    coinbase=False,
    chattext=None,
    signin=None,
    no_relationship=False
):
    """Build and sign a new transaction, optionally with an encrypted
    relationship payload (chat text, sign-in, or a generated relationship
    encrypted with a DH-derived key).

    :raises Exception: 'missing private key' when a relationship must be
        generated but no private key was supplied.
    """
    self.config = get_config()
    self.mongo = self.config.mongo
    self.app_log = getLogger('tornado.application')
    self.block_height = block_height
    self.bulletin_secret = bulletin_secret
    self.username = username
    self.requester_rid = requester_rid
    self.requested_rid = requested_rid
    self.public_key = public_key
    self.dh_public_key = dh_public_key
    self.private_key = private_key
    self.value = value
    self.fee = float(fee)
    self.dh_private_key = dh_private_key
    self.to = to
    # Transaction timestamp: current unix time as a string.
    self.time = str(int(time.time()))
    self.outputs = []
    self.no_relationship = no_relationship
    for x in outputs:
        self.outputs.append(Output.from_dict(x))
    self.inputs = []
    for x in inputs:
        # Inputs with a full signature/public_key/address triple are external.
        if 'signature' in x and 'public_key' in x and 'address' in x:
            self.inputs.append(ExternalInput.from_dict(x))
        else:
            self.inputs.append(Input.from_dict(x))
    self.coinbase = coinbase
    self.chattext = chattext
    self.signin = signin
    # Select/verify inputs covering value + fee before hashing.
    self.do_money()
    inputs_concat = self.get_input_hashes()
    outputs_concat = self.get_output_hashes()
    if bulletin_secret:
        self.rid = self.generate_rid()
        if self.chattext:
            # Chat message: encrypt with our own wallet key.
            self.relationship = json.dumps({
                "chatText": self.chattext
            })
            self.cipher = Crypt(self.config.wif)
            self.encrypted_relationship = self.cipher.encrypt(self.relationship)
        elif self.signin:
            # Sign-in: encrypt with the first shared secret for this rid.
            for shared_secret in TU.get_shared_secrets_by_rid(self.rid):
                self.relationship = SignIn(self.signin)
                self.cipher = Crypt(shared_secret.hex(), shared=True)
                self.encrypted_relationship = self.cipher.shared_encrypt(
                    self.relationship.to_json())
                break
        elif self.no_relationship:
            self.encrypted_relationship = ''
        else:
            # Generate a DH keypair if the caller did not supply one.
            if not self.dh_public_key or not self.dh_private_key:
                a = os.urandom(32).decode('latin1')
                self.dh_public_key = scalarmult_base(a).encode('latin1').hex()
                self.dh_private_key = a.encode().hex()
            self.relationship = self.generate_relationship()
            if not private_key:
                raise Exception('missing private key')
            self.cipher = Crypt(self.config.wif)
            self.encrypted_relationship = self.cipher.encrypt(
                self.relationship.to_json())
    else:
        self.rid = ''
        self.encrypted_relationship = ''
    # Canonical header string; its sha256 is the transaction hash/id.
    self.header = (
        self.public_key +
        self.time +
        self.dh_public_key +
        self.rid +
        self.encrypted_relationship +
        "{0:.8f}".format(self.fee) +
        self.requester_rid +
        self.requested_rid +
        inputs_concat +
        outputs_concat
    )
    self.hash = hashlib.sha256(self.header.encode('utf-8')).digest().hex()
    if self.private_key:
        self.transaction_signature = TU.generate_signature_with_private_key(
            private_key, self.hash)
    else:
        # Unsigned (e.g. built for inspection); signature left empty.
        self.transaction_signature = ''
    self.transaction = self.generate_transaction()
def get_target(cls, height, last_block, block, blockchain) -> int:
    """Compute the proof-of-work target for a block at `height`.

    On regnet/testnet the maximum (easiest) target is always used. On a
    retarget boundary the target is rescaled by the time the last
    `retarget_period` blocks actually took (clamped to min/max seconds,
    fork-dependent). Otherwise the target is inherited from the nearest
    prior block that was neither special-min nor at max target.

    :param height: index of the block being targeted.
    :param last_block: the block preceding `block`.
    :param block: the candidate block.
    :param blockchain: chain (possibly partial) used to walk back in time.
    :return: integer target; re-raises any unexpected exception after logging.
    """
    try:
        # change target
        max_target = CHAIN.MAX_TARGET
        if get_config().network in ['regnet', 'testnet']:
            return int(max_target)
        max_block_time = CHAIN.target_block_time(get_config().network)
        retarget_period = CHAIN.RETARGET_PERIOD  # blocks
        max_seconds = CHAIN.TWO_WEEKS  # seconds
        min_seconds = CHAIN.HALF_WEEK  # seconds
        # Fork heights tighten the retarget window.
        if height >= CHAIN.POW_FORK_V3:
            retarget_period = CHAIN.RETARGET_PERIOD_V3
            max_seconds = CHAIN.MAX_SECONDS_V3  # seconds
            min_seconds = CHAIN.MIN_SECONDS_V3  # seconds
        elif height >= CHAIN.POW_FORK_V2:
            retarget_period = CHAIN.RETARGET_PERIOD_V2
            max_seconds = CHAIN.MAX_SECONDS_V2  # seconds
            min_seconds = CHAIN.MIN_SECONDS_V2  # seconds
        if height > 0 and height % retarget_period == 0:
            # Retarget boundary: rescale by actual elapsed time over the
            # last retarget_period blocks.
            get_config().debug_log(
                "RETARGET get_target height {} - last_block {} - block {}/time {}"
                .format(height, last_block.index, block.index, block.time))
            block_from_2016_ago = Block.from_dict(
                get_config().BU.get_block_by_index(height - retarget_period))
            get_config().debug_log(
                "Block_from_2016_ago - block {}/time {}".format(
                    block_from_2016_ago.index, block_from_2016_ago.time))
            two_weeks_ago_time = block_from_2016_ago.time
            elapsed_time_from_2016_ago = int(
                last_block.time) - int(two_weeks_ago_time)
            get_config().debug_log(
                "elapsed_time_from_2016_ago {} s {} days".format(
                    int(elapsed_time_from_2016_ago),
                    elapsed_time_from_2016_ago / (60 * 60 * 24)))
            # greater than two weeks?  Clamp to [min_seconds, max_seconds].
            if elapsed_time_from_2016_ago > max_seconds:
                time_for_target = max_seconds
                get_config().debug_log("gt max")
            elif elapsed_time_from_2016_ago < min_seconds:
                time_for_target = min_seconds
                get_config().debug_log("lt min")
            else:
                time_for_target = int(elapsed_time_from_2016_ago)
            block_to_check = last_block
            # Partial chains are indexed from 0 locally, not by block height.
            if blockchain.partial:
                start_index = len(blockchain.blocks) - 1
            else:
                start_index = last_block.index
            get_config().debug_log("start_index {}".format(start_index))
            # Walk back past special-min / max-target / unset targets to
            # find the last "real" target to scale from.
            while 1:
                if block_to_check.special_min or block_to_check.target == max_target or not block_to_check.target:
                    block_to_check = blockchain.blocks[start_index]
                    start_index -= 1
                else:
                    target = block_to_check.target
                    break
            get_config().debug_log("start_index2 {}, target {}".format(
                block_to_check.index, hex(int(target))[2:].rjust(64, '0')))
            # Scale the target proportionally to the elapsed time.
            new_target = int((time_for_target * target) / max_seconds)
            get_config().debug_log("new_target {}".format(
                hex(int(new_target))[2:].rjust(64, '0')))
            if new_target > max_target:
                target = max_target
            else:
                target = new_target
        elif height == 0:
            # Genesis: easiest target.
            target = max_target
        else:
            block_to_check = block
            delta_t = int(block.time) - int(last_block.time)
            # Late special-min blocks (post-38600 fork) get a special target.
            if block.index >= 38600 and delta_t > max_block_time and block.special_min:
                special_target = CHAIN.special_target(
                    block.index, block.target, delta_t,
                    get_config().network)
                return special_target
            block_to_check = last_block  # this would be accurate. right now, it checks if the current block is under its own target, not the previous block's target
            if blockchain.partial:
                start_index = len(blockchain.blocks) - 1
            else:
                start_index = last_block.index
            # Inherit the target from the nearest prior non-special block.
            while 1:
                if start_index == 0:
                    return block_to_check.target
                if block_to_check.special_min or block_to_check.target == max_target or not block_to_check.target:
                    block_to_check = blockchain.blocks[start_index]
                    start_index -= 1
                else:
                    target = block_to_check.target
                    break
        return int(target)
    except Exception as e:
        import sys, os
        print("Exception {} get_target".format(e))
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        print(exc_type, fname, exc_tb.tb_lineno)
        raise
def __init__(self):
    """Open sync (pymongo) and async (motor) connections to the configured
    database and ensure the collection indexes exist.

    Index creation is best-effort: failures (e.g. pre-existing indexes or
    an unreachable server) are ignored so startup can proceed.
    """
    self.config = get_config()
    self.client = MongoClient(self.config.mongodb_host)
    self.db = self.client[self.config.database]
    self.site_db = self.client[self.config.site_database]
    try:
        # test connection
        self.db.yadacoin.find_one()
    except Exception:
        # Connection failed: try to start a local mongod daemon.
        if hasattr(self.config, 'mongod_path'):
            os.system('sudo {} --syslog --fork'.format(
                self.config.mongod_path))
        else:
            os.system('sudo mongod --syslog --fork')
    # blocks collection indexes
    __id = IndexModel([("id", ASCENDING)], name="__id", unique=True)
    __hash = IndexModel([("hash", ASCENDING)], name="__hash")
    __index = IndexModel([("index", ASCENDING)], name="__index")
    __to = IndexModel([("transactions.outputs.to", ASCENDING)], name="__to")
    __txn_id = IndexModel([("transactions.id", ASCENDING)], name="__txn_id")
    __txn_inputs_id = IndexModel([("transactions.inputs.id", ASCENDING)],
                                 name="__txn_inputs_id")
    try:
        # Fix: __txn_inputs_id was constructed but never passed to
        # create_indexes, so the transactions.inputs.id index was never
        # built.
        self.db.blocks.create_indexes(
            [__hash, __index, __id, __to, __txn_id, __txn_inputs_id])
    except Exception:
        pass
    # unspent_cache collection indexes
    __id = IndexModel([("id", ASCENDING)], name="__id")
    __height = IndexModel([("height", ASCENDING)], name="__height")
    try:
        self.db.unspent_cache.create_indexes([__id, __height])
    except Exception:
        pass
    # consensus collection indexes
    __id = IndexModel([("id", ASCENDING)], name="__id")
    __index = IndexModel([("index", ASCENDING)], name="__index")
    __block_hash = IndexModel([("block.hash", ASCENDING)],
                              name="__block_hash")
    try:
        self.db.consensus.create_indexes([__id, __index, __block_hash])
    except Exception:
        pass
    # shares collection indexes
    __address = IndexModel([("address", ASCENDING)], name="__address")
    __index = IndexModel([("index", ASCENDING)], name="__index")
    __hash = IndexModel([("hash", ASCENDING)], name="__hash")
    try:
        self.db.shares.create_indexes([__address, __index, __hash])
    except Exception:
        pass
    __txn_id = IndexModel([("txn.id", ASCENDING)], name="__txn_id")
    try:
        self.db.transactions_by_rid_cache.create_indexes([__txn_id])
    except Exception:
        pass
    # TODO: add indexes for peers
    # See https://motor.readthedocs.io/en/stable/tutorial-tornado.html
    self.async_client = MotorClient(self.config.mongodb_host)
    self.async_db = self.async_client[self.config.database]
    self.async_site_db = self.async_client[self.config.site_database]