def inject(self, _async=True, preapply=True, check_result=True, num_blocks_wait=5):
    """ Inject the signed operation group.

    :param _async: do not wait for operation inclusion (default is True)
    :param preapply: do a preapply before injection
    :param check_result: raise RpcError in case operation is refused
    :param num_blocks_wait: number of blocks to wait for injection
    :returns: operation group with metadata (raw RPC response)
    """
    self.context.reset()  # NOTE(review): presumably clears cached state (e.g. counter) — confirm
    if preapply:
        # Dry-run against the node first so an obviously invalid operation
        # fails before it is broadcast.
        opg_with_metadata = self.preapply()
        if not OperationResult.is_applied(opg_with_metadata):
            raise RpcError.from_errors(
                OperationResult.errors(opg_with_metadata))

    opg_hash = self.shell.injection.operation.post(
        operation=self.binary_payload(), _async=False)

    if _async:
        # Fire-and-forget: return the payload plus the hash without
        # waiting for the operation to be included in a block.
        return {
            'chain_id': self.chain_id,
            'hash': opg_hash,
            **self.json_payload()
        }
    else:
        # Poll for up to `num_blocks_wait` blocks: while the operation is
        # still pending in the mempool keep waiting; once the mempool
        # lookup raises StopIteration, search the blocks seen so far.
        for i in range(num_blocks_wait):
            self.shell.wait_next_block()
            try:
                pending_opg = self.shell.mempool.pending_operations[
                    opg_hash]
                if not OperationResult.is_applied(pending_opg):
                    raise RpcError.from_errors(
                        OperationResult.errors(pending_opg))
                print(f'Still in mempool: {opg_hash}')
            except StopIteration:
                # Left the mempool — look it up in the last (i + 1) blocks,
                # i.e. every block produced since injection started.
                res = self.shell.blocks[-(i + 1):].find_operation(opg_hash)
                if check_result:
                    if not OperationResult.is_applied(res):
                        raise RpcError.from_errors(
                            OperationResult.errors(res))
                return res

        raise TimeoutError(opg_hash)
def autofill(self, gas_reserve=100, counter=None):
    """ Fill the gaps and then simulate the operation in order to calculate fee, gas/storage limits.

    :param gas_reserve: Add a safe reserve for gas limit (default is 100)
    :param counter: Override counter value (for manual handling)
    :rtype: OperationGroup
    """
    opg = self.fill(counter=counter)
    opg_with_metadata = opg.run()
    if not OperationResult.is_applied(opg_with_metadata):
        raise RpcError.from_errors(
            OperationResult.errors(opg_with_metadata))

    # Size of the serialized branch and signature, amortized over contents
    extra_size = (32 + 64) // len(opg.contents) + 1

    def fill_content(content):
        # Only manager operations (validation pass 3) carry fee/limits
        if validation_passes[content['kind']] == 3:
            consumed_gas = OperationResult.consumed_gas(
                content) + gas_reserve
            paid_storage_size_diff = OperationResult.paid_storage_size_diff(
                content)
            burned = OperationResult.burned(content)
            fee = calculate_fee(content, consumed_gas, extra_size)
            # BUG FIX: `consumed_gas` already includes `gas_reserve`;
            # the original wrote `str(consumed_gas + gas_reserve)`, adding
            # the reserve a second time and making the gas limit
            # inconsistent with the fee calculated just above.
            content.update(gas_limit=str(consumed_gas),
                           storage_limit=str(paid_storage_size_diff + burned),
                           fee=str(fee))

        content.pop('metadata')
        return content

    opg.contents = list(map(fill_content, opg_with_metadata['contents']))
    return opg
def inject(self, _async=True, check_result=True, num_blocks_wait=2):
    """ Inject signed operation group.

    :param _async: do not wait for operation inclusion (default is True)
    :param check_result: raise RpcError in case operation is refused
    :param num_blocks_wait: number of blocks to wait for inclusion before giving up
    :returns: operation group with metadata (raw RPC response), or chain id, hash and payload when `_async` is True
    """
    # Dry-run against the node first so an obviously invalid operation
    # fails before it is broadcast.
    opg_with_metadata = self.preapply()
    if not OperationResult.is_applied(opg_with_metadata):
        raise RpcError.from_errors(
            OperationResult.errors(opg_with_metadata)) from None

    opg_hash = self.shell.injection.operation.post(
        operation=self.binary_payload(), _async=False)

    if _async:
        # Fire-and-forget: return the payload plus the hash without
        # waiting for inclusion.
        return {
            'chain_id': self.chain_id,
            'hash': opg_hash,
            **self.json_payload()
        }
    else:
        # Poll for up to `num_blocks_wait` blocks: while the operation is
        # still pending in the mempool keep waiting; once the mempool
        # lookup raises StopIteration, search the blocks seen so far.
        for i in range(num_blocks_wait):
            self.shell.wait_next_block()
            try:
                pending_opg = self.shell.mempool.pending_operations[
                    opg_hash]
                if not OperationResult.is_applied(pending_opg):
                    raise RpcError.from_errors(
                        OperationResult.errors(pending_opg)) from None
                print(f'Still in mempool: {opg_hash}')
            except StopIteration:
                # Left the mempool — look it up in the last (i + 1) blocks
                res = self.shell.blocks[-(i + 1):].find_operation(opg_hash)
                if check_result:
                    if not OperationResult.is_applied(res):
                        raise RpcError.from_errors(
                            OperationResult.errors(res)) from None
                return res

        raise TimeoutError(opg_hash)
def from_operation_group(cls, operation_group: dict, **predicates):
    """ Build result objects from an operation group, filtered by predicates.

    :param operation_group: raw operation group as returned by the RPC
    :param predicates: filter contents using predicates `field=value`
    """
    if not cls.is_applied(operation_group):
        raise RpcError.from_errors(cls.errors(operation_group)) from None

    def dispatch(content):
        # Transactions and originations get parsed; anything else passes through
        kind = content['kind']
        if kind == 'transaction':
            return cls.from_transaction(content)
        if kind == 'origination':
            return cls.from_origination(content)
        return content

    return [dispatch(item) for item in cls.get_contents(operation_group, **predicates)]
def inject(
    self,
    check_result: bool = True,
    num_blocks_wait: int = 5,
    time_between_blocks: Optional[int] = None,
    block_timeout: Optional[int] = None,
    min_confirmations: int = 0,
    prevalidate: bool = True,
    **kwargs,
):
    """Inject the signed operation group.

    :param check_result: raise RpcError in case operation is applied but has runtime errors
    :param num_blocks_wait: number of blocks to wait for injection
    :param time_between_blocks: override the corresponding parameter from constants
    :param block_timeout: set block timeout (by default Pytezos will wait for a long time)
    :param min_confirmations: number of block injections to wait for before returning
    :param prevalidate: ask node to pre-validate the operation before the injection (True by default)
    :returns: operation group with metadata (raw RPC response)
    """
    self.context.reset()  # reset counter

    opg_hash = self.shell.injection.operation.post(
        operation=self.binary_payload(),
        _async=not prevalidate,
    )

    # Fire-and-forget mode: return immediately without waiting for inclusion
    if min_confirmations == 0:
        return {
            'chain_id': self.chain_id,
            'hash': opg_hash,
            **self.json_payload(),
        }

    found = self.shell.wait_operations(
        opg_hashes=[opg_hash],
        ttl=num_blocks_wait,
        min_confirmations=min_confirmations,
        time_between_blocks=time_between_blocks,
        block_timeout=block_timeout,
    )
    assert len(found) == 1
    operation = found[0]
    if check_result and not OperationResult.is_applied(operation):
        raise RpcError.from_errors(OperationResult.errors(operation))
    return operation
def from_operation_group(cls, operation_group: dict, **predicates):
    """ Initialize with operation group contents.

    :param operation_group: operation group, e.g. {"branch": "B...", "contents": [...], ...}
    :param predicates: filter contents using predicates `field=value`
    :rtype: List[OperationResult]
    """
    if not cls.is_applied(operation_group):
        raise RpcError.from_errors(cls.errors(operation_group))

    # Kind-specific parsers; unknown kinds pass through untouched
    handlers = {
        'transaction': cls.from_transaction,
        'origination': cls.from_origination,
    }

    results = []
    for content in cls.get_contents(operation_group, **predicates):
        handler = handlers.get(content['kind'])
        results.append(handler(content) if handler is not None else content)
    return results
def _check_op(self, op):
    """ Check the status of an injected operation.

    Returns None if the operation has not been included yet,
    raises RpcError if it was included but failed,
    and returns the raw operation result if it was applied.

    :param op: operation dict, or a tuple whose first item is one
    """
    op_data = op[0] if isinstance(op, tuple) else op
    op_hash = op_data["hash"]

    # Scan only the most recent `block_depth` blocks
    blocks = self.client.shell.blocks[-self.block_depth:]
    try:
        res = blocks.find_operation(op_hash)
        if not OperationResult.is_applied(res):
            # BUG FIX: the original used `from op_hash` — the raise-from
            # clause requires an exception instance (or None), not a str,
            # so it raised TypeError and masked the actual RPC error.
            raise RpcError.from_errors(
                OperationResult.errors(res)) from None
        print(pformat_consumed_gas(res))
        return res
    except StopIteration:  # not found in the scanned blocks yet
        return None
def autofill(
    self,
    gas_reserve: int = DEFAULT_GAS_RESERVE,
    burn_reserve: int = DEFAULT_BURN_RESERVE,
    counter: Optional[int] = None,
    ttl: Optional[int] = None,
    fee: Optional[int] = None,
    gas_limit: Optional[int] = None,
    storage_limit: Optional[int] = None,
    **kwargs,
) -> 'OperationGroup':
    """Fill the gaps and then simulate the operation in order to calculate fee, gas/storage limits.

    :param gas_reserve: Add a safe reserve for dynamically calculated gas limit (default is 100).
    :param burn_reserve: Add a safe reserve for dynamically calculated storage limit (default is 100).
    :param counter: Override counter value (for manual handling)
    :param ttl: Number of blocks to wait in the mempool before removal (default is 5 for public network, 60 for sandbox)
    :param fee: Explicitly set fee for operation. If not set fee will be calculated depending on results of operation dry-run.
    :param gas_limit: Explicitly set gas limit for operation. If not set gas limit will be calculated depending on results of operation dry-run.
    :param storage_limit: Explicitly set storage limit for operation. If not set storage limit will be calculated depending on results of operation dry-run.
    :rtype: OperationGroup
    """
    if kwargs.get('branch_offset') is not None:
        logger.warning('`branch_offset` argument is deprecated, use `ttl` instead')
        ttl = MAX_OPERATIONS_TTL - kwargs['branch_offset']

    opg = self.fill(counter=counter, ttl=ttl)
    opg_with_metadata = opg.run()
    if not OperationResult.is_applied(opg_with_metadata):
        raise RpcError.from_errors(OperationResult.errors(opg_with_metadata))

    # Size of serialized branch and signature, amortized over contents
    extra_size = (32 + 64) // len(opg.contents) + 1

    def fill_content(content: Dict[str, Any]) -> Dict[str, Any]:
        # Only manager operations (validation pass 3) carry fee/limits
        if validation_passes[content['kind']] == 3:
            _gas_limit, _storage_limit, _fee = gas_limit, storage_limit, fee
            if _gas_limit is None:
                _gas_limit = OperationResult.consumed_gas(content)
                if content['kind'] in ['origination', 'transaction']:
                    _gas_limit += gas_reserve
            # CONSISTENCY FIX: check the local `_storage_limit` (not the
            # outer `storage_limit` parameter) to mirror the `_gas_limit`
            # and `_fee` branches; behavior is identical since the two are
            # equal at this point.
            if _storage_limit is None:
                _paid_storage_size_diff = OperationResult.paid_storage_size_diff(content)
                _burned = OperationResult.burned(content)
                _storage_limit = _paid_storage_size_diff + _burned
                if content['kind'] in ['origination', 'transaction']:
                    _storage_limit += burn_reserve
            if _fee is None:
                _fee = calculate_fee(content, _gas_limit, extra_size)

            current_counter = int(content['counter'])
            content.update(
                gas_limit=str(_gas_limit),
                storage_limit=str(_storage_limit),
                fee=str(_fee),
                # Shift the counter by the context-tracked offset in case
                # other operations were injected since the dry-run
                counter=str(current_counter + self.context.get_counter_offset()),
            )

        content.pop('metadata')
        logger.debug("autofilled transaction content: %s" % content)
        return content

    opg.contents = list(map(fill_content, opg_with_metadata['contents']))
    return opg
def inject(
    self,
    check_result: bool = True,
    num_blocks_wait: int = 5,
    time_between_blocks: Optional[int] = None,
    min_confirmations: int = 0,
    **kwargs
):
    """Inject the signed operation group.

    :param check_result: raise RpcError in case operation is applied but has runtime errors
    :param num_blocks_wait: number of blocks to wait for injection
    :param time_between_blocks: override the corresponding parameter from constants
    :param min_confirmations: number of block injections to wait for before returning
    :returns: operation group with metadata (raw RPC response)
    """
    # BUG FIX: the original guard `if kwargs.get('_async'):` was only
    # entered for a truthy `_async`, so the `else 1` mapping for
    # `_async=False` (wait for inclusion) was dead code. Check for the
    # key's presence instead so both legacy values are honored.
    if kwargs.get('_async') is not None:
        logger.warning('`_async` argument is deprecated, use `min_confirmations` instead')
        min_confirmations = 0 if kwargs['_async'] is True else 1

    self.context.reset()  # reset counter

    opg_hash = self.shell.injection.operation.post(
        operation=self.binary_payload(),
        _async=False,
    )

    # Fire-and-forget mode: return immediately without waiting for inclusion
    if min_confirmations == 0:
        return {
            'chain_id': self.chain_id,
            'hash': opg_hash,
            **self.json_payload(),
        }

    logger.info('Waiting for %s confirmations in %s blocks', min_confirmations, num_blocks_wait)
    in_mempool = True
    confirmations = 0
    for _ in range(num_blocks_wait):
        logger.info('Waiting for the next block')
        self.shell.wait_next_block(time_between_blocks=time_between_blocks)

        if in_mempool:
            try:
                pending_opg = self.shell.mempool.pending_operations[opg_hash]
                if not OperationResult.is_applied(pending_opg):
                    raise RpcError.from_errors(OperationResult.errors(pending_opg))
                logger.info('Operation %s is still in mempool', opg_hash)
                continue
            except StopIteration:
                # Left the mempool — start checking blocks from now on
                in_mempool = False

        try:
            res = self.shell.blocks[-1:].find_operation(opg_hash)
        except StopIteration:
            # TYPO FIX: message previously read "lastest block"
            logger.info('Operation %s not found in latest block', opg_hash)
            continue

        if check_result:
            if not OperationResult.is_applied(res):
                raise RpcError.from_errors(OperationResult.errors(res))

        confirmations += 1
        logger.info('Got %s/%s confirmations', confirmations, min_confirmations)
        if confirmations == min_confirmations:
            return res

    raise TimeoutError(f'Operation {opg_hash} got {confirmations} confirmations in {num_blocks_wait} blocks')