Code example #1
    def autofill(self, gas_reserve=100, counter=None):
        """ Fill the gaps and then simulate the operation in order to calculate fee, gas/storage limits.

        :param gas_reserve: Add a safe reserve for gas limit (default is 100)
        :param counter: Override counter value (for manual handling)
        :rtype: OperationGroup
        """
        opg = self.fill(counter=counter)
        opg_with_metadata = opg.run()
        if not OperationResult.is_applied(opg_with_metadata):
            raise RpcError.from_errors(
                OperationResult.errors(opg_with_metadata))

        extra_size = (32 + 64) // len(opg.contents) + 1  # size of serialized branch and signature

        def fill_content(content):
            if validation_passes[content['kind']] == 3:
                # gas_reserve is added to the dry-run estimate here (used for the fee)
                # and once more below when the gas limit is set
                consumed_gas = OperationResult.consumed_gas(content) + gas_reserve
                paid_storage_size_diff = OperationResult.paid_storage_size_diff(content)
                burned = OperationResult.burned(content)
                fee = calculate_fee(content, consumed_gas, extra_size)
                content.update(gas_limit=str(consumed_gas + gas_reserve),
                               storage_limit=str(paid_storage_size_diff + burned),
                               fee=str(fee))

            content.pop('metadata')
            return content

        opg.contents = list(map(fill_content, opg_with_metadata['contents']))
        return opg
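
For orientation, a minimal usage sketch of the autofill() shown above, chained the same way as in the later examples; the node URL, key, and destination address below are placeholders, not part of the original snippet.

from pytezos import pytezos

# Placeholders: point the client at your own node and key
client = pytezos.using(shell='https://rpc.example.com', key='edsk...')

opg = (client.transaction(destination='tz1...', amount=1000)
       .autofill(gas_reserve=200)  # larger safety margin than the default 100
       .sign()
       .inject())
print(opg['hash'])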
Code example #2
File: group.py Project: baking-bad/pytezos
        def fill_content(content: Dict[str, Any]) -> Dict[str, Any]:
            if validation_passes[content['kind']] == 3:
                _gas_limit, _storage_limit, _fee = gas_limit, storage_limit, fee

                if _gas_limit is None:
                    _gas_limit = OperationResult.consumed_gas(content)
                    if content['kind'] in ['origination', 'transaction']:
                        _gas_limit += gas_reserve

                if storage_limit is None:
                    _paid_storage_size_diff = OperationResult.paid_storage_size_diff(content)
                    _burned = OperationResult.burned(content)
                    _storage_limit = _paid_storage_size_diff + _burned
                    if content['kind'] in ['origination', 'transaction']:
                        _storage_limit += burn_reserve

                if _fee is None:
                    _fee = calculate_fee(content, _gas_limit, extra_size)

                current_counter = int(content['counter'])
                content.update(
                    gas_limit=str(_gas_limit),
                    storage_limit=str(_storage_limit),
                    fee=str(_fee),
                    counter=str(current_counter + self.context.get_counter_offset()),
                )

            content.pop('metadata')
            logger.debug("autofilled transaction content: %s" % content)
            return content
Code example #3
File: group.py Project: tbinetruy/pytezos
        def fill_content(content):
            if validation_passes[content['kind']] == 3:
                consumed_gas = OperationResult.consumed_gas(content) + gas_reserve
                paid_storage_size_diff = OperationResult.paid_storage_size_diff(content)
                fee = calculate_fee(content, consumed_gas, extra_size)
                content.update(gas_limit=str(consumed_gas + gas_reserve),
                               storage_limit=str(paid_storage_size_diff + burn_cap(content)),
                               fee=str(fee))

            content.pop('metadata')
            return content
Code example #4
    def inject(
        self,
        check_result: bool = True,
        num_blocks_wait: int = 5,
        time_between_blocks: Optional[int] = None,
        block_timeout: Optional[int] = None,
        min_confirmations: int = 0,
        prevalidate: bool = True,
        **kwargs,
    ):
        """Inject the signed operation group.

        :param check_result: raise RpcError in case operation is applied but has runtime errors
        :param num_blocks_wait: number of blocks to wait for injection
        :param time_between_blocks: override the corresponding parameter from constants
        :param block_timeout: set block timeout (by default Pytezos will wait for a long time)
        :param min_confirmations: number of block injections to wait for before returning
        :param prevalidate: ask node to pre-validate the operation before the injection (True by default)
        :returns: operation group with metadata (raw RPC response)
        """
        self.context.reset()  # reset counter

        opg_hash = self.shell.injection.operation.post(
            operation=self.binary_payload(),
            _async=not prevalidate,
        )

        if min_confirmations == 0:
            return {
                'chain_id': self.chain_id,
                'hash': opg_hash,
                **self.json_payload(),
            }

        operations = self.shell.wait_operations(
            opg_hashes=[opg_hash],
            ttl=num_blocks_wait,
            min_confirmations=min_confirmations,
            time_between_blocks=time_between_blocks,
            block_timeout=block_timeout,
        )

        assert len(operations) == 1
        if check_result:
            if not OperationResult.is_applied(operations[0]):
                raise RpcError.from_errors(
                    OperationResult.errors(operations[0]))

        return operations[0]
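
A short sketch of this inject() with a confirmation requirement; `client` and `script` are placeholder objects, not part of the original snippet.

# Placeholders: `client` is a configured PyTezos client, `script` a contract script
opg = client.origination(script=script).autofill().sign()
result = opg.inject(min_confirmations=1, num_blocks_wait=10)
print(OperationResult.originated_contracts(result))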
Code example #5
    def _originate_single_contract(self, origination):
        opg = self.client.bulk(origination).autofill().sign().inject(
            min_confirmations=1)
        res = OperationResult.from_operation_group(opg)
        contract_id = res[0].originated_contracts[0]
        _print_contract(contract_id)
        return contract_id
Code example #6
    def test_3_consumed_gas(self) -> None:
        opg = self.client \
            .transaction(destination=sandbox_addresses['bootstrap3'], amount=1000)\
            .send(min_confirmations=1)

        consumed_gas = OperationResult.consumed_gas(opg.opg_result)
        self.assertGreater(consumed_gas, 0)
Code example #7
File: interface.py Project: trufflesuite/pytezos
    def from_contract_call(cls, operation_group: dict, address,
                           contract: Contract) -> list:
        """ Get a list of results from an operation group content with metadata.

        :param operation_group: {..., "contents": [{..., kind: "transaction", ...}]}
        :param address: address of the invoked contract
        :param contract: invoked contract
        :rtype: List[ContractCallResult]
        """
        results = list()
        for content in OperationResult.iter_contents(operation_group):
            if content['kind'] == 'transaction':
                if content['destination'] == address:
                    results.append(cls.from_transaction(content))
            elif content['kind'] == 'origination':
                result = cls.get_result(content)
                if address in result.get('originated_contracts', []):
                    results.append(cls.from_origination(content))

        def decode_result(res):
            kwargs = dict(storage=contract.storage.decode(res.storage))
            if hasattr(res, 'big_map_diff'):
                contract.storage.big_map_init(res.storage)
                kwargs.update(big_map_diff=contract.storage.
                              big_map_diff_decode(res.big_map_diff))
            if hasattr(res, 'parameters'):
                kwargs.update(parameters=contract.parameter.decode(
                    data=res.parameters))
            if hasattr(res, 'operations'):
                kwargs.update(operations=res.operations)
            return cls(**kwargs)

        return list(map(decode_result, results))
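
A sketch of how this classmethod might be used to decode a dry run; ContractCallResult is the class named in the docstring's :rtype:, and `call`, `address`, and `contract` are placeholders.

opg_with_metadata = call.operation_group.fill().run()  # simulate without injecting
results = ContractCallResult.from_contract_call(
    opg_with_metadata, address=address, contract=contract)
for res in results:
    print(res.storage)  # decoded storage after the call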
Code example #8
    def from_contract_call(cls, operation_group: dict, address,
                           contract: Contract) -> list:
        results = list()
        for content in OperationResult.iter_contents(operation_group):
            if content['kind'] == 'transaction':
                if content['destination'] == address:
                    results.append(cls.from_transaction(content))
            elif content['kind'] == 'origination':
                result = cls.get_result(content)
                if address in result.get('originated_contracts', []):
                    results.append(cls.from_origination(content))

        def decode_result(res):
            kwargs = dict(storage=contract.storage.decode(res.storage))
            if hasattr(res, 'big_map_diff'):
                contract.storage.big_map_init(res.storage)
                kwargs.update(big_map_diff=contract.storage.
                              big_map_diff_decode(res.big_map_diff))
            if hasattr(res, 'parameters'):
                kwargs.update(parameters=contract.parameter.decode(
                    data=res.parameters))
            if hasattr(res, 'operations'):
                kwargs.update(operations=res.operations)
            return cls(**kwargs)

        return list(map(decode_result, results))
Code example #9
    def deploy_all_staking(self, file_path, meta_uri=v2_meta_uri, admin=None):
        with open(file_path) as f:
            data = json.load(f)
            duration = data["duration"]
            wrap_token = data["wrap_token"]
            reserve_contract = data["reserve_contract"]
            storages = list(
                map(lambda x: self._staking_storage(meta_uri, duration, wrap_token, reserve_contract, x["exponent"],
                                                    token=x["name"],
                                                    admin=admin),
                    data["tokens"]))
            chunk = 5
            contracts = []
            for i in range(0, len(storages), chunk):
                print(f"deploy {i} to {i + chunk}")
                local = storages[i:i + chunk]
                ops = list(map(lambda s: self.staking_contract.originate(initial_storage=s), local))

                opg = self.client.bulk(*ops).autofill().sign().inject(min_confirmations=1)
                print(f"Injected {opg['hash']}")
                deployed = OperationResult.originated_contracts(opg)
                print(f"Deployed {deployed}")
                contracts += deployed
            result = [{"contract": contract, **(data["tokens"][index])} for index, contract in enumerate(contracts)]
            print(json.dumps({"reserve_contract": reserve_contract, "contracts": result}))
Code example #10
File: cli.py Project: tbinetruy/pytezos
    def deploy(self,
               path,
               storage=None,
               network='carthagenet',
               key=None,
               github_repo_slug=None,
               github_oauth_token=None,
               dry_run=False):
        """
        Deploy contract to the specified network
        :param path: Path to the .tz file
        :param storage: Storage in JSON format (not Micheline)
        :param network:
        :param key:
        :param github_repo_slug:
        :param github_oauth_token:
        :param dry_run: Set this flag if you just want to see what would happen
        """
        ptz = pytezos.using(shell=network, key=key)
        print(
            f'Deploying contract using {ptz.key.public_key_hash()} in the {network}'
        )

        contract = get_contract(path)
        if storage is not None:
            storage = contract.storage.encode(storage)

        try:
            opg = ptz.origination(script=contract.script(
                storage=storage)).autofill().sign()
            print(f'Injecting origination operation:')
            pprint(opg.json_payload())

            if dry_run:
                pprint(opg.preapply())
                exit(0)
            else:
                opg = opg.inject(_async=False)
        except RpcError as e:
            pprint(e)
            exit(-1)
        else:
            originated_contracts = OperationResult.originated_contracts(opg)
            assert len(originated_contracts) == 1
            bcd_link = make_bcd_link(network, originated_contracts[0])
            print(f'Contract was successfully deployed: {bcd_link}')

            if github_repo_slug:
                deployment = create_deployment(github_repo_slug,
                                               github_oauth_token,
                                               environment=network)
                pprint(deployment)
                status = create_deployment_status(
                    github_repo_slug,
                    github_oauth_token,
                    deployment_id=deployment['id'],
                    state='success',
                    environment=network,
                    environment_url=bcd_link)
                pprint(status)
Code example #11
    def from_run_operation(cls, operation_group: Dict[str, Any], context: ExecutionContext) -> List['ContractCallResult']:
        """Get a list of results from an operation group content with metadata.

        :param operation_group: {..., "contents": [{..., kind: "transaction", ...}]}
        :param context: execution context
        :rtype: ContractCallResult
        """
        results: List['OperationResult'] = list()
        for content in OperationResult.iter_contents(operation_group):
            if content['kind'] == 'transaction':
                if content['destination'] == context.address:
                    results.append(cls.from_transaction(content))
            elif content['kind'] == 'origination':
                result = cls.get_result(content)
                if context.address in result.get('originated_contracts', []):
                    results.append(cls.from_origination(content))

        program = MichelsonProgram.load(context)

        def decode_result(res: OperationResult) -> 'ContractCallResult':
            kwargs = {}  # type: ignore
            if hasattr(res, 'storage') and res.storage is not None:  # type: ignore
                storage = program.storage.from_micheline_value(res.storage)  # type: ignore
                if hasattr(res, 'lazy_diff'):
                    kwargs.update(lazy_diff=res.lazy_diff)  # type: ignore
                    storage = storage.merge_lazy_diff(res.lazy_diff)  # type: ignore
                kwargs.update(storage=storage.to_python_object())
            if hasattr(res, 'parameters'):
                parameters = program.parameter.from_parameters(res.parameters)  # type: ignore
                kwargs.update(parameters=parameters)
            if hasattr(res, 'operations'):
                kwargs.update(operations=res.operations)  # type: ignore
            return cls(**kwargs)

        return list(map(decode_result, results))
Code example #12
File: interface.py Project: juztin/pytezos-1
    def view(self):
        """
        Get return value of a view method.
        :return: object
        """
        opg_with_metadata = self.operation_group.fill().run()
        view_operation = OperationResult.get_contents(opg_with_metadata, source=self.address)[0]
        view_contract = Contract.from_micheline(self.shell.contracts[view_operation['destination']].code())
        return view_contract.parameter.decode(view_operation['parameters'])
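
A sketch of calling view() on a prepared contract call; the contract proxy and entrypoint name are hypothetical placeholders.

# Placeholders: `token` is a contract proxy, `get_allowance` a hypothetical view-style entrypoint
value = token.get_allowance(owner='tz1...', spender='tz1...').view()
print(value)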
Code example #13
    def _check_op(self, op):
        """
        Returns None if operation is not completed
        Raises error if operation failed
        Return operation result if operation is completed
        """

        op_data = op[0] if isinstance(op, tuple) else op
        op_hash = op_data["hash"]

        blocks = self.client.shell.blocks[-self.block_depth:]
        try:
            res = blocks.find_operation(op_hash)
            if not OperationResult.is_applied(res):
                raise RpcError.from_errors(
                    OperationResult.errors(res))
            print(pformat_consumed_gas(res))
            return res
        except StopIteration:
            # not found
            return None
Code example #14
File: cli.py Project: yourlabsopensource/pytezos
def deploy(
    _ctx,
    path: str,
    storage: Optional[str],  # pylint: disable=redefined-outer-name
    network: str,
    key: Optional[str],
    github_repo_slug: Optional[str],
    github_oauth_token: Optional[str],
    dry_run: bool,
):
    ptz = pytezos.using(shell=network, key=key)
    logger.info('Deploying contract using %s in the %s',
                ptz.key.public_key_hash(), network)

    contract = get_contract(path)
    try:
        opg = ptz.origination(script=contract.script(
            initial_storage=storage)).autofill().sign()
        logger.info('Injecting origination operation:')
        logger.info(pformat(opg.json_payload()))

        if dry_run:
            logger.info(pformat(opg.preapply()))
            sys.exit(0)
        else:
            opg = opg.inject(_async=False)
    except RpcError as e:
        logger.critical(pformat(e))
        sys.exit(-1)
    else:
        originated_contracts = OperationResult.originated_contracts(opg)
        if len(originated_contracts) != 1:
            raise Exception(
                'Operation group must have exactly one originated contract')
        bcd_link = make_bcd_link(network, originated_contracts[0])
        logger.info('Contract was successfully deployed: %s', bcd_link)

        if github_repo_slug:
            deployment = create_deployment(
                github_repo_slug,
                github_oauth_token,
                environment=network,
            )
            logger.info(pformat(deployment))
            status = create_deployment_status(
                github_repo_slug,
                github_oauth_token,
                deployment_id=deployment['id'],
                state='success',
                environment=network,
                environment_url=bcd_link,
            )
            logger.info(status)
Code example #15
def verify_op(op_hash):
    try:
        # look 5 blocks back for our operation
        opg = pytezos.shell.blocks[-5:].find_operation(op_hash)
    except StopIteration:
        return 0
    ret = -1
    for op in OperationResult.iter_contents(opg):
        print(op['metadata']['operation_result']['status'])
        if op['metadata']['operation_result']['status'] == 'applied':
            ret = 1
            break
    return ret
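
Since verify_op() returns 0 while the operation is not yet found in the last five blocks, 1 once an applied result is seen, and -1 otherwise, callers can poll it; a minimal sketch (the hash and sleep interval are placeholders):

import time

status = 0
while status == 0:  # keep polling until the operation shows up
    time.sleep(30)
    status = verify_op(op_hash)
print('applied' if status == 1 else 'failed')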
Code example #16
    def all(self,
            signers: dict[str, str],
            governance_token,
            tokens: list[TokenAndMetaType],
            nft: list[NftTokenAndMetaType] = [],
            threshold=1):
        originations = [
            self._fa2_origination(tokens),
            self._governance_token_origination(governance_token)
        ]
        originations.extend(
            [self._nft_origination(v) for k, v in enumerate(nft)])
        print("Deploying FA2s and nfts")
        opg = self.client.bulk(*originations).autofill().sign().inject(
            min_confirmations=1, _async=False)
        originated_contracts = OperationResult.originated_contracts(opg)
        for o in originated_contracts:
            _print_contract(o)
        fa2 = originated_contracts[0]
        governance = originated_contracts[1]
        nft_contracts = dict(
            (v["eth_contract"][2:], originated_contracts[k + 2])
            for k, v in enumerate(nft))

        print("Deploying quorum contract")
        quorum = self._originate_single_contract(
            self._quorum_origination(signers, threshold))

        minter = self._deploy_minter(quorum, tokens, fa2, {
            'tezos': governance,
            'eth': governance_token
        }, nft_contracts)
        admin_calls = self._set_tokens_minter(minter, fa2, governance,
                                              nft_contracts)
        print("Setting and confirming FA2s administrator")
        self.client.bulk(*admin_calls).autofill().sign().inject(
            min_confirmations=1)
        print(f"Nfts contracts: {nft_contracts}\n")
        print(
            f"FA2 contract: {fa2}\nGovernance token: {governance}\nQuorum contract: {quorum}\nMinter contract: {minter}"
        )
Code example #17
    def inject(self,
               _async=True,
               preapply=True,
               check_result=True,
               num_blocks_wait=5):
        """ Inject the signed operation group.

        :param _async: do not wait for operation inclusion (default is True)
        :param preapply: do a preapply before injection
        :param check_result: raise RpcError in case operation is refused
        :param num_blocks_wait: number of blocks to wait for injection
        :returns: operation group with metadata (raw RPC response)
        """
        self.context.reset()
        if preapply:
            opg_with_metadata = self.preapply()
            if not OperationResult.is_applied(opg_with_metadata):
                raise RpcError.from_errors(
                    OperationResult.errors(opg_with_metadata))

        opg_hash = self.shell.injection.operation.post(
            operation=self.binary_payload(), _async=False)

        if _async:
            return {
                'chain_id': self.chain_id,
                'hash': opg_hash,
                **self.json_payload()
            }
        else:
            for i in range(num_blocks_wait):
                self.shell.wait_next_block()
                try:
                    pending_opg = self.shell.mempool.pending_operations[
                        opg_hash]
                    if not OperationResult.is_applied(pending_opg):
                        raise RpcError.from_errors(
                            OperationResult.errors(pending_opg))
                    print(f'Still in mempool: {opg_hash}')
                except StopIteration:
                    res = self.shell.blocks[-(i + 1):].find_operation(opg_hash)
                    if check_result:
                        if not OperationResult.is_applied(res):
                            raise RpcError.from_errors(
                                OperationResult.errors(res))
                    return res

        raise TimeoutError(opg_hash)
Code example #18
    def post(self):

        #try:
        payload = v.read_requests(request)
        pytz = v.read_session(session)

        swap = Contract.from_file('./smart_contracts/atomic_swap.tz')
        op = pytz.origination(script=swap.script(storage={
                'admin': pytz.key.public_key_hash(),
                'interested_party': pytz.key.public_key_hash(),
                'fa12': payload['fa12'],
                'immutable': False,
                'tk_amount': payload['tk_amount'],
                'tz_amount': payload['tz_amount']})).fill().sign().inject(_async=False, num_blocks_wait=2)

        swapkt = OperationResult.originated_contracts(op)
        fa12 = pytz.contract(payload['fa12'])
        print([pytz.key.public_key_hash(), swapkt[0]])
        r = fa12.transfer({"from" : pytz.key.public_key_hash(), "to" : swapkt[0], 'value' : payload['tk_amount']}).inject()
            
        return [v.filter_response(op), r]
Code example #19
File: fa12_route.py Project: lkazmer/ungrund
    def post(self):

        payload = v.read_requests(request)
        try:
            pytz = v.read_session(session)
        except Exception:
            pytz = v.load_keystore()

        if payload['forge'] == True:
            pass

        contract = Contract.from_file('./smart_contracts/fa12.tz')
        op = pytz.origination(script=contract.script(
            storage={
                'ledger': {},
                'admin': payload['admin'],
                'paused': False,
                'totalSupply': payload['total_supply']
            })).autofill().sign().inject(_async=False, num_blocks_wait=2)

        return OperationResult.originated_contracts(op)
Code example #20
File: group.py Project: jpic/pytezos
    def inject(self, _async=True, check_result=True, num_blocks_wait=2):
        """
        Inject signed operation group.
        :param _async: do not wait for operation inclusion (default is True)
        :param check_result:
        :param num_blocks_wait:
        """
        opg_with_metadata = self.preapply()
        if not OperationResult.is_applied(opg_with_metadata):
            raise RpcError.from_errors(
                OperationResult.errors(opg_with_metadata)) from None

        opg_hash = self.shell.injection.operation.post(
            operation=self.binary_payload(), _async=False)

        if _async:
            return {
                'chain_id': self.chain_id,
                'hash': opg_hash,
                **self.json_payload()
            }
        else:
            for i in range(num_blocks_wait):
                self.shell.wait_next_block()
                try:
                    pending_opg = self.shell.mempool.pending_operations[
                        opg_hash]
                    if not OperationResult.is_applied(pending_opg):
                        raise RpcError.from_errors(
                            OperationResult.errors(pending_opg)) from None
                    print(f'Still in mempool: {opg_hash}')
                except StopIteration:
                    res = self.shell.blocks[-(i + 1):].find_operation(opg_hash)
                    if check_result:
                        if not OperationResult.is_applied(res):
                            raise RpcError.from_errors(
                                OperationResult.errors(res)) from None
                    return res

        raise TimeoutError(opg_hash)
Code example #21
File: group.py Project: baking-bad/pytezos
    def autofill(
        self,
        gas_reserve: int = DEFAULT_GAS_RESERVE,
        burn_reserve: int = DEFAULT_BURN_RESERVE,
        counter: Optional[int] = None,
        ttl: Optional[int] = None,
        fee: Optional[int] = None,
        gas_limit: Optional[int] = None,
        storage_limit: Optional[int] = None,
        **kwargs,
    ) -> 'OperationGroup':
        """Fill the gaps and then simulate the operation in order to calculate fee, gas/storage limits.

        :param gas_reserve: Add a safe reserve for dynamically calculated gas limit (default is 100).
        :param burn_reserve: Add a safe reserve for dynamically calculated storage limit (default is 100).
        :param counter: Override counter value (for manual handling)
        :param ttl: Number of blocks to wait in the mempool before removal (default is 5 for public network, 60 for sandbox)
        :param fee: Explicitly set fee for operation. If not set fee will be calculated depending on results of operation dry-run.
        :param gas_limit: Explicitly set gas limit for operation. If not set gas limit will be calculated depending on results of
            operation dry-run.
        :param storage_limit: Explicitly set storage limit for operation. If not set storage limit will be calculated depending on
            results of operation dry-run.
        :rtype: OperationGroup
        """
        if kwargs.get('branch_offset') is not None:
            logger.warning('`branch_offset` argument is deprecated, use `ttl` instead')
            ttl = MAX_OPERATIONS_TTL - kwargs['branch_offset']

        opg = self.fill(counter=counter, ttl=ttl)
        opg_with_metadata = opg.run()
        if not OperationResult.is_applied(opg_with_metadata):
            raise RpcError.from_errors(OperationResult.errors(opg_with_metadata))

        extra_size = (32 + 64) // len(opg.contents) + 1  # size of serialized branch and signature

        def fill_content(content: Dict[str, Any]) -> Dict[str, Any]:
            if validation_passes[content['kind']] == 3:
                _gas_limit, _storage_limit, _fee = gas_limit, storage_limit, fee

                if _gas_limit is None:
                    _gas_limit = OperationResult.consumed_gas(content)
                    if content['kind'] in ['origination', 'transaction']:
                        _gas_limit += gas_reserve

                if storage_limit is None:
                    _paid_storage_size_diff = OperationResult.paid_storage_size_diff(content)
                    _burned = OperationResult.burned(content)
                    _storage_limit = _paid_storage_size_diff + _burned
                    if content['kind'] in ['origination', 'transaction']:
                        _storage_limit += burn_reserve

                if _fee is None:
                    _fee = calculate_fee(content, _gas_limit, extra_size)

                current_counter = int(content['counter'])
                content.update(
                    gas_limit=str(_gas_limit),
                    storage_limit=str(_storage_limit),
                    fee=str(_fee),
                    counter=str(current_counter + self.context.get_counter_offset()),
                )

            content.pop('metadata')
            logger.debug("autofilled transaction content: %s" % content)
            return content

        opg.contents = list(map(fill_content, opg_with_metadata['contents']))
        return opg
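
Unlike example #1, this newer autofill() also accepts explicit overrides; a sketch with pinned values (all numbers are arbitrary placeholders and `client` is a configured client object):

opg = client.transaction(destination='tz1...', amount=1000).autofill(
    fee=1500,          # skip fee estimation entirely
    gas_limit=12000,   # pin the gas limit instead of using the dry-run estimate
    storage_limit=300,
    ttl=10,            # stay valid in the mempool for 10 blocks
)
opg.sign().inject(min_confirmations=1)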
Code example #22
def sync_to_blockchain(is_dry_run=True, _async=False):
    print('starting sync')
    time.sleep(settings.BLOCKCHAIN_SYNC_WAIT_TIME)
    from apps.wallet.models import Wallet, MetaTransaction, Transaction, WalletPublicKeyTransferRequest, TRANSACTION_STATES

    pytezos_client = pytezos.using(
        key=settings.TEZOS_ADMIN_ACCOUNT_PRIVATE_KEY,
        shell=settings.TEZOS_NODE)
    token_contract = pytezos_client.contract(
        settings.TEZOS_TOKEN_CONTRACT_ADDRESS)

    funding_transactions = {}
    meta_transactions = []
    operation_groups = []

    state_update_items = []

    for transaction in Transaction.objects.exclude(
            state=TRANSACTION_STATES.PENDING.value).exclude(
                state=TRANSACTION_STATES.DONE.value).order_by('created_at'):
        state_update_items.append(transaction)
        if not transaction.from_wallet:
            operation_groups.append(
                token_contract.mint(
                    address=transaction.to_wallet.address,
                    decimals=transaction.to_wallet.currency.decimals,
                    name=transaction.to_wallet.currency.name,
                    token_id=transaction.to_wallet.currency.token_id,
                    symbol=transaction.to_wallet.currency.symbol,
                    amount=transaction.amount).operation_group.sign())
        elif MetaTransaction.objects.filter(pk=transaction.pk).exists():
            meta_transactions.append(
                MetaTransaction.objects.get(pk=transaction))
        else:
            same_from_txs = funding_transactions.get(
                transaction.from_wallet.address, [])
            same_from_txs.append({
                "to_": transaction.to_wallet.address,
                "token_id": transaction.to_wallet.currency.token_id,
                "amount": transaction.amount
            })
            funding_transactions[
                transaction.from_wallet.address] = same_from_txs

    # preparing funding
    if len(funding_transactions.items()) > 0:
        funding_transaction_payloads = list(
            map(lambda item: {
                "from_": item[0],
                "txs": item[1]
            }, funding_transactions.items()))
        operation_groups.append(
            token_contract.transfer(
                funding_transaction_payloads).operation_group.sign())

    # preparing meta
    if len(meta_transactions) > 0:
        meta_transaction_payloads = list(
            map(
                lambda meta_transaction: meta_transaction.
                to_meta_transaction_dictionary(), meta_transactions))
        operation_groups.append(
            token_contract.meta_transfer(
                meta_transaction_payloads).operation_group.sign())

    # wallet public key transfers
    wallet_public_key_transfer_payloads = []
    wallet_public_key_transfer_requests = []
    for wallet_public_key_transfer_request in WalletPublicKeyTransferRequest.objects.exclude(
            state=TRANSACTION_STATES.PENDING.value).exclude(
                state=TRANSACTION_STATES.DONE.value).order_by('created_at'):
        if wallet_public_key_transfer_request.wallet.balance > 0 and wallet_public_key_transfer_request.wallet.public_key != wallet_public_key_transfer_request.new_public_key:
            new_address = Wallet(public_key=wallet_public_key_transfer_request.
                                 new_public_key).address
            state_update_items.append(wallet_public_key_transfer_request)
            wallet_public_key_transfer_requests.append(
                wallet_public_key_transfer_request)
            wallet_public_key_transfer_payloads.append({
                "from_":
                wallet_public_key_transfer_request.wallet.address,
                "txs": [{
                    "to_":
                    new_address,
                    "token_id":
                    wallet_public_key_transfer_request.wallet.currency.
                    token_id,
                    "amount":
                    wallet_public_key_transfer_request.wallet.balance
                }]
            })
        else:
            wallet_public_key_transfer_request.old_public_key = wallet_public_key_transfer_request.wallet.public_key
            wallet_public_key_transfer_request.wallet.public_key = wallet_public_key_transfer_request.new_public_key
            wallet_public_key_transfer_request.wallet.save()
            wallet_public_key_transfer_request.state = TRANSACTION_STATES.DONE.value
            wallet_public_key_transfer_request.notes = "Has no balance or was recovering to same pubkey, transferred offchain"
            wallet_public_key_transfer_request.save()

    if len(wallet_public_key_transfer_payloads) > 0:
        operation_groups.append(
            token_contract.transfer(
                wallet_public_key_transfer_payloads).operation_group.sign())

    # merging all operations into one single group
    final_operation_group = None
    operation_counter = 0
    for operation_group in operation_groups:
        if final_operation_group is None:
            final_operation_group = operation_group
            operation_counter = int(operation_group.contents[0]['counter'])
        else:
            operation_counter += 1
            operation = operation_group.contents[0]
            operation['counter'] = str(operation_counter)
            final_operation_group = final_operation_group.operation(
                operation_group.contents[0])

    if final_operation_group is not None:  # we have stuff to sync
        print(final_operation_group)
        operation_result = final_operation_group.sign().preapply()
        print(operation_result)
        if is_dry_run:
            return OperationResult.is_applied(operation_result)
        elif OperationResult.is_applied(operation_result):

            def update_sync_state(items,
                                  state=TRANSACTION_STATES.PENDING.value,
                                  notes='',
                                  operation_hash=''):
                for item in items:
                    type(item).objects.filter(pk=item.pk).update(
                        state=state,
                        notes=notes,
                        operation_hash=operation_hash,
                        submitted_to_chain_at=now())

            update_sync_state(state_update_items)
            try:
                is_confirmed_in_chain = False
                try:
                    operation_inject_result = final_operation_group.sign(
                    ).inject(_async=_async,
                             preapply=True,
                             check_result=True,
                             num_blocks_wait=settings.TEZOS_BLOCK_WAIT_TIME)
                    is_operation_applied = OperationResult.is_applied(
                        operation_inject_result)
                    is_confirmed_in_chain = True
                except AssertionError:
                    # here we assume that the operation was applied even if we know the assertion failed
                    is_operation_applied = True

                if is_operation_applied:
                    for wallet_public_key_transfer_request in wallet_public_key_transfer_requests:
                        wallet_public_key_transfer_request.old_public_key = wallet_public_key_transfer_request.wallet.public_key
                        wallet_public_key_transfer_request.wallet.public_key = wallet_public_key_transfer_request.new_public_key
                        wallet_public_key_transfer_request.wallet.save()
                        wallet_public_key_transfer_request.state = TRANSACTION_STATES.DONE.value
                        wallet_public_key_transfer_request.save()
                    if is_confirmed_in_chain:
                        update_sync_state(state_update_items,
                                          TRANSACTION_STATES.DONE.value,
                                          json.dumps(operation_inject_result),
                                          operation_inject_result['hash'])
                    else:
                        update_sync_state(state_update_items,
                                          TRANSACTION_STATES.DONE.value,
                                          json.dumps(operation_result), "*")
                else:
                    if operation_inject_result is None:
                        update_sync_state(
                            state_update_items,
                            TRANSACTION_STATES.FAILED.value,
                            'Error during sync: {}'.format(
                                json.dumps(operation_result)))
                    else:
                        update_sync_state(
                            state_update_items,
                            TRANSACTION_STATES.FAILED.value,
                            'Error during sync: {}'.format(
                                json.dumps(operation_inject_result)))
                return is_operation_applied
            except Exception as error:
                update_sync_state(
                    state_update_items, TRANSACTION_STATES.FAILED.value,
                    'Exception during sync: {}\nTraceback: {}'.format(
                        repr(error), traceback.format_exc()))
                return False
        else:
            return OperationResult.is_applied(operation_result)
Code example #23
    def test_1_originate_contract(self) -> None:
        ci = ContractInterface.from_michelson(code)
        res = self.client.origination(ci.script()).autofill().sign().inject(
            time_between_blocks=self.TIME_BETWEEN_BLOCKS, min_confirmations=1)
        self.assertEqual(1, len(OperationResult.originated_contracts(res)))
Code example #24
File: minter.py Project: Gradius93/wrap-tz-contracts
    def _print(self, opg):
        res = OperationResult.from_operation_group(opg)
        print(f"Done {res[0]['hash']}")
Code example #25
    def inject(
        self,
        check_result: bool = True,
        num_blocks_wait: int = 5,
        time_between_blocks: Optional[int] = None,
        min_confirmations: int = 0,
        **kwargs
    ):
        """Inject the signed operation group.

        :param check_result: raise RpcError in case operation is applied but has runtime errors
        :param num_blocks_wait: number of blocks to wait for injection
        :param time_between_blocks: override the corresponding parameter from constants
        :param min_confirmations: number of block injections to wait for before returning
        :returns: operation group with metadata (raw RPC response)
        """
        if kwargs.get('_async'):
            logger.warning('`_async` argument is deprecated, use `min_confirmations` instead')
            min_confirmations = 0 if kwargs['_async'] is True else 1

        self.context.reset()

        opg_hash = self.shell.injection.operation.post(
            operation=self.binary_payload(),
            _async=False,
        )

        if min_confirmations == 0:
            return {
                'chain_id': self.chain_id,
                'hash': opg_hash,
                **self.json_payload(),
            }

        logger.info('Waiting for %s confirmations in %s blocks', min_confirmations, num_blocks_wait)
        in_mempool = True
        confirmations = 0
        for _ in range(num_blocks_wait):
            logger.info('Waiting for the next block')
            self.shell.wait_next_block(time_between_blocks=time_between_blocks)

            if in_mempool:
                try:
                    pending_opg = self.shell.mempool.pending_operations[opg_hash]
                    if not OperationResult.is_applied(pending_opg):
                        raise RpcError.from_errors(OperationResult.errors(pending_opg))
                    logger.info('Operation %s is still in mempool', opg_hash)
                    continue
                except StopIteration:
                    in_mempool = False

            try:
                res = self.shell.blocks[-1:].find_operation(opg_hash)
            except StopIteration:
                logger.info('Operation %s not found in latest block', opg_hash)
                continue

            if check_result:
                if not OperationResult.is_applied(res):
                    raise RpcError.from_errors(OperationResult.errors(res))

            confirmations += 1
            logger.info('Got %s/%s confirmations', confirmations, min_confirmations)
            if confirmations == min_confirmations:
                return res

        raise TimeoutError(f'Operation {opg_hash} got {confirmations} confirmations in {num_blocks_wait} blocks')
Code example #26
def get_consumed_gas(op_res):
    gs = (r["consumed_gas"] for r in OperationResult.iter_results(op_res))
    return [int(g) for g in gs]
Code example #27
File: quorum.py Project: Gradius93/wrap-tz-contracts
    def print_opg(self, opg):
        contents = OperationResult.get_contents(opg)
        print(f"Done {opg['hash']}")
        print(f"{OperationResult.get_result(contents[0])}")
        print(f"{OperationResult.consumed_gas(opg)}")
Code example #28
File: client.py Project: utdemir/pytezos
    def wait(
        self,
        *operation_groups: OperationGroup,
        min_confirmations: int = 1,
        num_blocks_wait: int = 5,
        time_between_blocks: Optional[int] = None,
        prev_hash: Optional[str] = None,
    ) -> Tuple[OperationGroup, ...]:
        """Wait for multiple injected operations to get enough confirmations

        :param min_confirmations: number of block injections to wait for before returning
        :param num_blocks_wait: number of blocks to wait for injection
        :param time_between_blocks: override the corresponding parameter from constants
        :param prev_hash: Current block hash (optional). If not set, current head is used.
        """
        logger.info('Waiting for %s confirmations in %s blocks', min_confirmations, num_blocks_wait)
        confirmations = {opg.opg_hash: 0 for opg in operation_groups}
        for _ in range(num_blocks_wait):
            logger.info('Waiting for the next block')
            prev_hash = self.shell.wait_next_block(time_between_blocks=time_between_blocks, prev_hash=prev_hash)
            block_operations = self.shell.blocks[prev_hash].operations.managers()

            for opg in operation_groups:
                if confirmations[opg.opg_hash] == 0:
                    res = next((item for item in block_operations if item['hash'] == opg.opg_hash), None)
                    if res is not None:
                        logger.info('Operation %s was included in block %s', opg.opg_hash, prev_hash)
                        confirmations[opg.opg_hash] = 1
                        if not OperationResult.is_applied(res):
                            raise RpcError.from_errors(OperationResult.errors(res)) from None
                else:
                    confirmations[opg.opg_hash] += 1
                    logger.info('Got %s/%s confirmations for %s', confirmations[opg.opg_hash], min_confirmations, opg.opg_hash)

            if any(value == 0 for value in confirmations.values()):
                pending_operations = self.shell.mempool.pending_operations.flatten()
                for opg in operation_groups:
                    if confirmations[opg.opg_hash] == 0:
                        res = next((item for item in pending_operations if item['hash'] == opg.opg_hash), None)
                        if res is not None:
                            logger.info('Operation %s is still in mempool', opg.opg_hash)
                            if not OperationResult.is_applied(res):
                                raise RpcError.from_errors(OperationResult.errors(res)) from None

            for opg in operation_groups:
                if confirmations[opg.opg_hash] == 0:
                    confirmations[opg.opg_hash] = self.shell.get_confirmations(
                        opg_hash=opg.opg_hash,
                        kind=opg.contents[0]['kind'],
                        branch=opg.branch,
                        head=prev_hash,
                    )
                    if confirmations[opg.opg_hash] == 0:
                        raise ValueError(f'Operation {opg.opg_hash} is not found') from None

            if all(value >= min_confirmations for value in confirmations.values()):
                return operation_groups

        required_confirmations = min_confirmations * len(operation_groups)
        gathered_confirmations = sum(confirmations.values())
        raise TimeoutError(f'Operations got {gathered_confirmations}/{required_confirmations} confirmations in {num_blocks_wait} blocks')
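
A sketch of waiting for several operation groups at once, assuming (as in example #6) that send() returns the injected OperationGroup; destinations and amounts are placeholders.

opg_1 = client.transaction(destination='tz1...', amount=1000).send()
opg_2 = client.transaction(destination='tz1...', amount=2000).send()
client.wait(opg_1, opg_2, min_confirmations=1, num_blocks_wait=10)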
Code example #29
File: group.py Project: baking-bad/pytezos
    def result(self) -> List[OperationResult]:
        """Parse the preapply result.

        :rtype: List[OperationResult]
        """
        return OperationResult.from_operation_group(self.preapply())
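
A sketch of using result() to inspect the preapply outcome without injecting; `client` and `script` are placeholders, and the originated_contracts attribute access follows example #5.

res = client.origination(script=script).autofill().sign().result()
print(res[0].originated_contracts)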
Code example #30
File: group.py Project: tbinetruy/pytezos
    def result(self):
        """
        Parse preapply result
        :return: OperationResult
        """
        return OperationResult.from_operation_group(self.preapply())