# Example 1
    def inject(self,
               _async=True,
               preapply=True,
               check_result=True,
               num_blocks_wait=5):
        """Inject the signed operation group into the node.

        :param _async: do not wait for operation inclusion (default is True)
        :param preapply: run a preapply check before injecting
        :param check_result: raise RpcError in case operation is refused
        :param num_blocks_wait: number of blocks to wait for injection
        :returns: operation group with metadata (raw RPC response)
        """
        self.context.reset()

        if preapply:
            simulated = self.preapply()
            if not OperationResult.is_applied(simulated):
                raise RpcError.from_errors(OperationResult.errors(simulated))

        injected_hash = self.shell.injection.operation.post(
            operation=self.binary_payload(), _async=False)

        if _async:
            # Fire and forget: hand back the hash alongside the payload echo
            return {
                'chain_id': self.chain_id,
                'hash': injected_hash,
                **self.json_payload()
            }

        for depth in range(num_blocks_wait):
            self.shell.wait_next_block()
            try:
                # Raises StopIteration once the operation leaves the mempool
                mempool_entry = self.shell.mempool.pending_operations[
                    injected_hash]
            except StopIteration:
                # Left the mempool: search the last depth+1 blocks for it
                found = self.shell.blocks[-(depth + 1):].find_operation(
                    injected_hash)
                if check_result and not OperationResult.is_applied(found):
                    raise RpcError.from_errors(OperationResult.errors(found))
                return found
            if not OperationResult.is_applied(mempool_entry):
                raise RpcError.from_errors(
                    OperationResult.errors(mempool_entry))
            print(f'Still in mempool: {injected_hash}')

        raise TimeoutError(injected_hash)
# Example 2
    def autofill(self, gas_reserve=100, counter=None):
        """ Fill the gaps and then simulate the operation in order to calculate fee, gas/storage limits.

        :param gas_reserve: Add a safe reserve for gas limit (default is 100)
        :param counter: Override counter value (for manual handling)
        :raises RpcError: if the simulation (dry-run) of the operation fails
        :rtype: OperationGroup
        """
        opg = self.fill(counter=counter)
        opg_with_metadata = opg.run()
        if not OperationResult.is_applied(opg_with_metadata):
            raise RpcError.from_errors(
                OperationResult.errors(opg_with_metadata))

        # Size of serialized branch and signature, spread across the contents
        extra_size = (32 + 64) // len(opg.contents) + 1

        def fill_content(content):
            # Only manager operations (validation pass 3) carry fee/limits
            if validation_passes[content['kind']] == 3:
                consumed_gas = OperationResult.consumed_gas(
                    content) + gas_reserve
                paid_storage_size_diff = OperationResult.paid_storage_size_diff(
                    content)
                burned = OperationResult.burned(content)
                fee = calculate_fee(content, consumed_gas, extra_size)
                # BUGFIX: consumed_gas already includes gas_reserve; the
                # previous code added the reserve a second time here.
                content.update(gas_limit=str(consumed_gas),
                               storage_limit=str(paid_storage_size_diff +
                                                 burned),
                               fee=str(fee))

            content.pop('metadata')
            return content

        opg.contents = list(map(fill_content, opg_with_metadata['contents']))
        return opg
# Example 3
    def inject(self, _async=True, check_result=True, num_blocks_wait=2):
        """
        Inject signed operation group.

        :param _async: do not wait for operation inclusion (default is True)
        :param check_result: raise RpcError if the operation is included but
            not applied (only checked when waiting for inclusion)
        :param num_blocks_wait: number of blocks to wait for inclusion before
            raising TimeoutError (default is 2)
        :returns: a dict with chain_id/hash/payload if _async, otherwise the
            operation with metadata found on chain
        :raises RpcError: if preapply fails or the operation is not applied
        :raises TimeoutError: if the operation is not included in time
        """
        # Preapply first to catch errors before broadcasting
        opg_with_metadata = self.preapply()
        if not OperationResult.is_applied(opg_with_metadata):
            raise RpcError.from_errors(
                OperationResult.errors(opg_with_metadata)) from None

        opg_hash = self.shell.injection.operation.post(
            operation=self.binary_payload(), _async=False)

        if _async:
            # Fire and forget: caller only gets the hash and injected payload
            return {
                'chain_id': self.chain_id,
                'hash': opg_hash,
                **self.json_payload()
            }
        else:
            for i in range(num_blocks_wait):
                self.shell.wait_next_block()
                try:
                    # Raises StopIteration once the operation left the mempool
                    pending_opg = self.shell.mempool.pending_operations[
                        opg_hash]
                    if not OperationResult.is_applied(pending_opg):
                        raise RpcError.from_errors(
                            OperationResult.errors(pending_opg)) from None
                    print(f'Still in mempool: {opg_hash}')
                except StopIteration:
                    # Left the mempool: search the last i+1 blocks for it
                    res = self.shell.blocks[-(i + 1):].find_operation(opg_hash)
                    if check_result:
                        if not OperationResult.is_applied(res):
                            raise RpcError.from_errors(
                                OperationResult.errors(res)) from None
                    return res

        raise TimeoutError(opg_hash)
# Example 4
    def inject(
        self,
        check_result: bool = True,
        num_blocks_wait: int = 5,
        time_between_blocks: Optional[int] = None,
        block_timeout: Optional[int] = None,
        min_confirmations: int = 0,
        prevalidate: bool = True,
        **kwargs,
    ):
        """Inject the signed operation group.

        :param check_result: raise RpcError in case operation is applied but has runtime errors
        :param num_blocks_wait: number of blocks to wait for injection
        :param time_between_blocks: override the corresponding parameter from constants
        :param block_timeout: set block timeout (by default Pytezos will wait for a long time)
        :param min_confirmations: number of block injections to wait for before returning
        :param prevalidate: ask node to pre-validate the operation before the injection (True by default)
        :returns: operation group with metadata (raw RPC response)
        """
        self.context.reset()  # reset counter

        opg_hash = self.shell.injection.operation.post(
            operation=self.binary_payload(),
            _async=not prevalidate,
        )

        # Fire-and-forget mode: return immediately with the payload echo
        if min_confirmations == 0:
            return {
                'chain_id': self.chain_id,
                'hash': opg_hash,
                **self.json_payload(),
            }

        # Block until the requested number of confirmations is gathered
        included = self.shell.wait_operations(
            opg_hashes=[opg_hash],
            ttl=num_blocks_wait,
            min_confirmations=min_confirmations,
            time_between_blocks=time_between_blocks,
            block_timeout=block_timeout,
        )
        assert len(included) == 1

        confirmed_opg = included[0]
        if check_result and not OperationResult.is_applied(confirmed_opg):
            raise RpcError.from_errors(OperationResult.errors(confirmed_opg))
        return confirmed_opg
# Example 5
    def _check_op(self, op):
        """Check the status of an injected operation.

        Returns None if the operation is not completed yet, raises RpcError
        if it was included but failed, and returns the operation result
        (with metadata) if it is included in the recent blocks.

        :param op: operation group dict, or a tuple whose first element is one
        :raises RpcError: if the operation was included but not applied
        """
        # The caller may hand us either the raw result or a tuple wrapping it
        op_data = op[0] if isinstance(op, tuple) else op
        op_hash = op_data["hash"]

        blocks = self.client.shell.blocks[-self.block_depth:]
        try:
            res = blocks.find_operation(op_hash)
        except StopIteration:
            # Not included in the inspected blocks yet
            return None

        if not OperationResult.is_applied(res):
            # BUGFIX: the previous `raise ... from op_hash` passed a string as
            # the exception cause, which itself raises TypeError (causes must
            # derive from BaseException) and masked the intended RpcError.
            raise RpcError.from_errors(OperationResult.errors(res)) from None
        print(pformat_consumed_gas(res))
        return res
# Example 6
    def autofill(
        self,
        gas_reserve: int = DEFAULT_GAS_RESERVE,
        burn_reserve: int = DEFAULT_BURN_RESERVE,
        counter: Optional[int] = None,
        ttl: Optional[int] = None,
        fee: Optional[int] = None,
        gas_limit: Optional[int] = None,
        storage_limit: Optional[int] = None,
        **kwargs,
    ) -> 'OperationGroup':
        """Fill the gaps and then simulate the operation in order to calculate fee, gas/storage limits.

        :param gas_reserve: Add a safe reserve for dynamically calculated gas limit (default is 100).
        :param burn_reserve: Add a safe reserve for dynamically calculated storage limit (default is 100).
        :param counter: Override counter value (for manual handling)
        :param ttl: Number of blocks to wait in the mempool before removal (default is 5 for public network, 60 for sandbox)
        :param fee: Explicitly set fee for operation. If not set fee will be calculated depending on results of operation dry-run.
        :param gas_limit: Explicitly set gas limit for operation. If not set gas limit will be calculated depending on results of
            operation dry-run.
        :param storage_limit: Explicitly set storage limit for operation. If not set storage limit will be calculated depending on
            results of operation dry-run.
        :raises RpcError: if the simulation (dry-run) of the operation fails
        :rtype: OperationGroup
        """
        if kwargs.get('branch_offset') is not None:
            logger.warning('`branch_offset` argument is deprecated, use `ttl` instead')
            ttl = MAX_OPERATIONS_TTL - kwargs['branch_offset']

        opg = self.fill(counter=counter, ttl=ttl)
        opg_with_metadata = opg.run()
        if not OperationResult.is_applied(opg_with_metadata):
            raise RpcError.from_errors(OperationResult.errors(opg_with_metadata))

        extra_size = (32 + 64) // len(opg.contents) + 1  # size of serialized branch and signature

        def fill_content(content: Dict[str, Any]) -> Dict[str, Any]:
            # Only manager operations (validation pass 3) carry fee/limits
            if validation_passes[content['kind']] == 3:
                # Work on local copies so explicit overrides are respected
                _gas_limit, _storage_limit, _fee = gas_limit, storage_limit, fee

                if _gas_limit is None:
                    _gas_limit = OperationResult.consumed_gas(content)
                    if content['kind'] in ['origination', 'transaction']:
                        _gas_limit += gas_reserve

                # FIX: check the local working copy (was the outer parameter
                # `storage_limit`), consistent with the `_gas_limit` and
                # `_fee` checks; equivalent today, but bug-prone otherwise.
                if _storage_limit is None:
                    _paid_storage_size_diff = OperationResult.paid_storage_size_diff(content)
                    _burned = OperationResult.burned(content)
                    _storage_limit = _paid_storage_size_diff + _burned
                    if content['kind'] in ['origination', 'transaction']:
                        _storage_limit += burn_reserve

                if _fee is None:
                    _fee = calculate_fee(content, _gas_limit, extra_size)

                # Shift the counter by the context-tracked offset (presumably
                # accounting for operations injected since the dry-run)
                current_counter = int(content['counter'])
                content.update(
                    gas_limit=str(_gas_limit),
                    storage_limit=str(_storage_limit),
                    fee=str(_fee),
                    counter=str(current_counter + self.context.get_counter_offset()),
                )

            content.pop('metadata')
            # Lazy %-style args: formatting is skipped unless DEBUG is enabled
            logger.debug("autofilled transaction content: %s", content)
            return content

        opg.contents = list(map(fill_content, opg_with_metadata['contents']))
        return opg
# Example 7
def sync_to_blockchain(is_dry_run=True, _async=False):
    """Synchronize pending database state to the Tezos blockchain.

    Gathers all transactions that are neither PENDING nor DONE (mints,
    funding transfers and meta transactions) plus wallet public key
    transfer requests, merges everything into a single operation group,
    preapplies it and - unless this is a dry run - injects it, updating
    the state of every touched database item along the way.

    :param is_dry_run: if True only preapply and report the outcome
    :param _async: forwarded to inject(); do not wait for inclusion
    :returns: True if the merged operation group was (or would be) applied,
        False otherwise; None when there was nothing to sync
    """
    print('starting sync')
    time.sleep(settings.BLOCKCHAIN_SYNC_WAIT_TIME)
    from apps.wallet.models import Wallet, MetaTransaction, Transaction, WalletPublicKeyTransferRequest, TRANSACTION_STATES

    pytezos_client = pytezos.using(
        key=settings.TEZOS_ADMIN_ACCOUNT_PRIVATE_KEY,
        shell=settings.TEZOS_NODE)
    token_contract = pytezos_client.contract(
        settings.TEZOS_TOKEN_CONTRACT_ADDRESS)

    funding_transactions = {}
    meta_transactions = []
    operation_groups = []

    state_update_items = []

    # Transactions that are neither pending nor done still need syncing
    for transaction in Transaction.objects.exclude(
            state=TRANSACTION_STATES.PENDING.value).exclude(
                state=TRANSACTION_STATES.DONE.value).order_by('created_at'):
        state_update_items.append(transaction)
        if not transaction.from_wallet:
            # No source wallet: mint new tokens to the destination wallet
            operation_groups.append(
                token_contract.mint(
                    address=transaction.to_wallet.address,
                    decimals=transaction.to_wallet.currency.decimals,
                    name=transaction.to_wallet.currency.name,
                    token_id=transaction.to_wallet.currency.token_id,
                    symbol=transaction.to_wallet.currency.symbol,
                    amount=transaction.amount).operation_group.sign())
        elif MetaTransaction.objects.filter(pk=transaction.pk).exists():
            # FIX: pass the pk explicitly instead of relying on Django's
            # implicit model-instance-to-pk coercion
            meta_transactions.append(
                MetaTransaction.objects.get(pk=transaction.pk))
        else:
            # Plain transfer: batch transactions by source wallet address
            same_from_txs = funding_transactions.get(
                transaction.from_wallet.address, [])
            same_from_txs.append({
                "to_": transaction.to_wallet.address,
                "token_id": transaction.to_wallet.currency.token_id,
                "amount": transaction.amount
            })
            funding_transactions[
                transaction.from_wallet.address] = same_from_txs

    # preparing funding
    if funding_transactions:
        funding_transaction_payloads = [{
            "from_": address,
            "txs": txs
        } for address, txs in funding_transactions.items()]
        operation_groups.append(
            token_contract.transfer(
                funding_transaction_payloads).operation_group.sign())

    # preparing meta
    if meta_transactions:
        meta_transaction_payloads = [
            meta_transaction.to_meta_transaction_dictionary()
            for meta_transaction in meta_transactions
        ]
        operation_groups.append(
            token_contract.meta_transfer(
                meta_transaction_payloads).operation_group.sign())

    # wallet public key transfers
    wallet_public_key_transfer_payloads = []
    wallet_public_key_transfer_requests = []
    for wallet_public_key_transfer_request in WalletPublicKeyTransferRequest.objects.exclude(
            state=TRANSACTION_STATES.PENDING.value).exclude(
                state=TRANSACTION_STATES.DONE.value).order_by('created_at'):
        if wallet_public_key_transfer_request.wallet.balance > 0 and wallet_public_key_transfer_request.wallet.public_key != wallet_public_key_transfer_request.new_public_key:
            # Funds exist: move them on-chain to the new key's address
            new_address = Wallet(
                public_key=wallet_public_key_transfer_request.new_public_key
            ).address
            state_update_items.append(wallet_public_key_transfer_request)
            wallet_public_key_transfer_requests.append(
                wallet_public_key_transfer_request)
            wallet_public_key_transfer_payloads.append({
                "from_": wallet_public_key_transfer_request.wallet.address,
                "txs": [{
                    "to_": new_address,
                    "token_id": wallet_public_key_transfer_request.wallet.currency.token_id,
                    "amount": wallet_public_key_transfer_request.wallet.balance
                }]
            })
        else:
            # Nothing to move on-chain: complete the request immediately
            wallet_public_key_transfer_request.old_public_key = wallet_public_key_transfer_request.wallet.public_key
            wallet_public_key_transfer_request.wallet.public_key = wallet_public_key_transfer_request.new_public_key
            wallet_public_key_transfer_request.wallet.save()
            wallet_public_key_transfer_request.state = TRANSACTION_STATES.DONE.value
            wallet_public_key_transfer_request.notes = "Has no balance or was recovering to same pubkey, transferred offchain"
            wallet_public_key_transfer_request.save()

    if wallet_public_key_transfer_payloads:
        operation_groups.append(
            token_contract.transfer(
                wallet_public_key_transfer_payloads).operation_group.sign())

    # merging all operations into one single group
    final_operation_group = None
    operation_counter = 0
    for operation_group in operation_groups:
        if final_operation_group is None:  # FIX: identity check, not == None
            final_operation_group = operation_group
            operation_counter = int(operation_group.contents[0]['counter'])
        else:
            # Subsequent operations need strictly increasing counters
            operation_counter += 1
            operation = operation_group.contents[0]
            operation['counter'] = str(operation_counter)
            final_operation_group = final_operation_group.operation(
                operation_group.contents[0])

    if final_operation_group is not None:  # we have stuff to sync
        print(final_operation_group)
        operation_result = final_operation_group.sign().preapply()
        print(operation_result)
        if is_dry_run:
            return OperationResult.is_applied(operation_result)
        elif OperationResult.is_applied(operation_result):

            def update_sync_state(items,
                                  state=TRANSACTION_STATES.PENDING.value,
                                  notes='',
                                  operation_hash=''):
                # Bulk-update sync bookkeeping fields on the given model items
                for item in items:
                    type(item).objects.filter(pk=item.pk).update(
                        state=state,
                        notes=notes,
                        operation_hash=operation_hash,
                        submitted_to_chain_at=now())

            update_sync_state(state_update_items)
            try:
                is_confirmed_in_chain = False
                try:
                    operation_inject_result = final_operation_group.sign(
                    ).inject(_async=_async,
                             preapply=True,
                             check_result=True,
                             num_blocks_wait=settings.TEZOS_BLOCK_WAIT_TIME)
                    is_operation_applied = OperationResult.is_applied(
                        operation_inject_result)
                    is_confirmed_in_chain = True
                except AssertionError:
                    # here we assume that the operation was applied even if we know the assertion failed
                    is_operation_applied = True

                if is_operation_applied:
                    # Flip the public keys now that the transfer is on chain
                    for wallet_public_key_transfer_request in wallet_public_key_transfer_requests:
                        wallet_public_key_transfer_request.old_public_key = wallet_public_key_transfer_request.wallet.public_key
                        wallet_public_key_transfer_request.wallet.public_key = wallet_public_key_transfer_request.new_public_key
                        wallet_public_key_transfer_request.wallet.save()
                        wallet_public_key_transfer_request.state = TRANSACTION_STATES.DONE.value
                        wallet_public_key_transfer_request.save()
                    if is_confirmed_in_chain:
                        update_sync_state(state_update_items,
                                          TRANSACTION_STATES.DONE.value,
                                          json.dumps(operation_inject_result),
                                          operation_inject_result['hash'])
                    else:
                        update_sync_state(state_update_items,
                                          TRANSACTION_STATES.DONE.value,
                                          json.dumps(operation_result), "*")
                else:
                    # NOTE(review): operation_inject_result cannot be None on
                    # this path (inject either returned or raised), so the
                    # first branch looks dead - confirm before removing
                    if operation_inject_result is None:
                        update_sync_state(
                            state_update_items,
                            TRANSACTION_STATES.FAILED.value,
                            'Error during sync: {}'.format(
                                json.dumps(operation_result)))
                    else:
                        update_sync_state(
                            state_update_items,
                            TRANSACTION_STATES.FAILED.value,
                            'Error during sync: {}'.format(
                                json.dumps(operation_inject_result)))
                return is_operation_applied
            except Exception as error:
                update_sync_state(
                    state_update_items, TRANSACTION_STATES.FAILED.value,
                    'Exception during sync: {}\nTraceback: {}'.format(
                        repr(error), traceback.format_exc()))
                return False
        else:
            return OperationResult.is_applied(operation_result)
# Example 8
    def inject(
        self,
        check_result: bool = True,
        num_blocks_wait: int = 5,
        time_between_blocks: Optional[int] = None,
        min_confirmations: int = 0,
        **kwargs
    ):
        """Inject the signed operation group.

        :param check_result: raise RpcError in case operation is applied but has runtime errors
        :param num_blocks_wait: number of blocks to wait for injection
        :param time_between_blocks: override the corresponding parameter from constants
        :param min_confirmations: number of block injections to wait for before returning
        :returns: operation group with metadata (raw RPC response)
        :raises RpcError: if the operation is refused or not applied
        :raises TimeoutError: if confirmations were not gathered in time
        """
        # Backward compatibility with the old `_async` flag
        if kwargs.get('_async'):
            logger.warning('`_async` argument is deprecated, use `min_confirmations` instead')
            min_confirmations = 0 if kwargs['_async'] is True else 1

        self.context.reset()

        opg_hash = self.shell.injection.operation.post(
            operation=self.binary_payload(),
            _async=False,
        )

        # Fire-and-forget mode: return immediately with the operation hash
        if min_confirmations == 0:
            return {
                'chain_id': self.chain_id,
                'hash': opg_hash,
                **self.json_payload(),
            }

        logger.info('Waiting for %s confirmations in %s blocks', min_confirmations, num_blocks_wait)
        in_mempool = True
        confirmations = 0
        for _ in range(num_blocks_wait):
            logger.info('Waiting for the next block')
            self.shell.wait_next_block(time_between_blocks=time_between_blocks)

            if in_mempool:
                try:
                    # Raises StopIteration once the operation left the mempool
                    pending_opg = self.shell.mempool.pending_operations[opg_hash]
                    if not OperationResult.is_applied(pending_opg):
                        raise RpcError.from_errors(OperationResult.errors(pending_opg))
                    logger.info('Operation %s is still in mempool', opg_hash)
                    continue
                except StopIteration:
                    in_mempool = False

            try:
                res = self.shell.blocks[-1:].find_operation(opg_hash)
            except StopIteration:
                # FIX: typo in the log message ("lastest" -> "latest")
                logger.info('Operation %s not found in latest block', opg_hash)
                continue

            if check_result:
                if not OperationResult.is_applied(res):
                    raise RpcError.from_errors(OperationResult.errors(res))

            confirmations += 1
            logger.info('Got %s/%s confirmations', confirmations, min_confirmations)
            if confirmations == min_confirmations:
                return res

        raise TimeoutError(f'Operation {opg_hash} got {confirmations} confirmations in {num_blocks_wait} blocks')
# Example 9
    def wait(
        self,
        *operation_groups: OperationGroup,
        min_confirmations: int = 1,
        num_blocks_wait: int = 5,
        time_between_blocks: Optional[int] = None,
        prev_hash: Optional[str] = None,
    ) -> Tuple[OperationGroup, ...]:
        """Wait for multiple injected operations to get enough confirmations

        :param min_confirmations: number of block injections to wait for before returning
        :param num_blocks_wait: number of blocks to wait for injection
        :param time_between_blocks: override the corresponding parameter from constants
        :param prev_hash: Current block hash (optional). If not set, current head is used.
        :returns: the given operation groups, once each has at least min_confirmations
        :raises RpcError: if an operation was found but is not applied
        :raises ValueError: if an operation cannot be found at all
        :raises TimeoutError: if confirmations were not gathered within num_blocks_wait blocks
        """
        logger.info('Waiting for %s confirmations in %s blocks', min_confirmations, num_blocks_wait)
        # Per-operation confirmation counters, keyed by operation hash
        confirmations = {opg.opg_hash: 0 for opg in operation_groups}
        for _ in range(num_blocks_wait):
            logger.info('Waiting for the next block')
            prev_hash = self.shell.wait_next_block(time_between_blocks=time_between_blocks, prev_hash=prev_hash)
            block_operations = self.shell.blocks[prev_hash].operations.managers()

            # First pass: unseen operations are searched for in the new block;
            # already-included ones simply gain one more confirmation
            for opg in operation_groups:
                if confirmations[opg.opg_hash] == 0:
                    res = next((item for item in block_operations if item['hash'] == opg.opg_hash), None)
                    if res is not None:
                        logger.info('Operation %s was included in block %s', opg.opg_hash, prev_hash)
                        confirmations[opg.opg_hash] = 1
                        if not OperationResult.is_applied(res):
                            raise RpcError.from_errors(OperationResult.errors(res)) from None
                else:
                    confirmations[opg.opg_hash] += 1
                    logger.info('Got %s/%s confirmations for %s', confirmations[opg.opg_hash], min_confirmations, opg.opg_hash)

            # Second pass: operations still unseen may be sitting in the mempool
            if any(value == 0 for value in confirmations.values()):
                pending_operations = self.shell.mempool.pending_operations.flatten()
                for opg in operation_groups:
                    if confirmations[opg.opg_hash] == 0:
                        res = next((item for item in pending_operations if item['hash'] == opg.opg_hash), None)
                        if res is not None:
                            logger.info('Operation %s is still in mempool', opg.opg_hash)
                            if not OperationResult.is_applied(res):
                                raise RpcError.from_errors(OperationResult.errors(res)) from None

            # Final pass: ask the node directly; zero confirmations here means
            # the operation is in neither a block nor the mempool -> lost
            for opg in operation_groups:
                if confirmations[opg.opg_hash] == 0:
                    confirmations[opg.opg_hash] = self.shell.get_confirmations(
                        opg_hash=opg.opg_hash,
                        kind=opg.contents[0]['kind'],
                        branch=opg.branch,
                        head=prev_hash,
                    )
                    if confirmations[opg.opg_hash] == 0:
                        raise ValueError(f'Operation {opg.opg_hash} is not found') from None

            # Done as soon as every operation reached the requested depth
            if all(value >= min_confirmations for value in confirmations.values()):
                return operation_groups

        required_confirmations = min_confirmations * len(operation_groups)
        gathered_confirmations = sum(confirmations.values())
        raise TimeoutError(f'Operations got {gathered_confirmations}/{required_confirmations} confirmations in {num_blocks_wait} blocks')