async def registration(request):
    ## The request object comes from the Sanic app; it carries all the
    ## parameters sent by the user.
    async with aiohttp.ClientSession() as session:
        try:
            async with session.post(
                    f"http://{request.app.config.REGISTRATION}/registration",
                    json={
                        "email": request.json["email"],
                        "phone_number": request.json["phone_number"],
                        "adhaar": request.json["adhaar"],
                        "pancard": request.json["pancard"],
                        "first_name": request.json["first_name"],
                        "last_name": request.json["last_name"],
                        "user_type": request.json["user_type"],
                    }) as request_response:
                data = await request_response.read()
        except Exception as e:
            logging.error(f"Registration API is unreachable: {e}")
            raise ApiInternalError("Registration API is unreachable")

    request_json = load_json(data)

    if request_json.get("error"):
        raise ApiBadRequest("User already exists")

    user_id = request_json["data"]["user_id"]
    password = request_json["data"]["password"]
    secrets = request_json["data"]["secrets"]
    return user_id, password, secrets
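A minimal usage sketch (hypothetical route; assumes the Sanic app, the REGISTRATION service address, and the error handlers from this codebase are configured):

from sanic import Sanic, response

app = Sanic("gateway")

@app.post("/register")
async def register_handler(request):
    # registration() forwards the JSON body to the registration service
    # and returns the new user's credentials on success.
    user_id, password, secrets = await registration(request)
    return response.json({"user_id": user_id})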
Example 2
async def check_batch_status(conn, batch_id):
    status_request = client_batch_submit_pb2.ClientBatchStatusRequest(
        batch_ids=[batch_id], wait=True)
    validator_response = await conn.send(
        validator_pb2.Message.CLIENT_BATCH_STATUS_REQUEST,
        status_request.SerializeToString())

    status_response = client_batch_submit_pb2.ClientBatchStatusResponse()
    status_response.ParseFromString(validator_response.content)
    batch_status = status_response.batch_statuses[0].status
    if batch_status == client_batch_submit_pb2.ClientBatchStatus.INVALID:
        invalid = status_response.batch_statuses[0].invalid_transactions[0]
        raise ApiBadRequest(invalid.message)
    elif batch_status == client_batch_submit_pb2.ClientBatchStatus.PENDING:
        raise ApiInternalError("Transaction submitted but timed out")
    elif batch_status == client_batch_submit_pb2.ClientBatchStatus.UNKNOWN:
        raise ApiInternalError("Something went wrong. Try again later")
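For context, a hedged sketch of a call site; the connection object and serialized submit request are assumed to come from the surrounding Sawtooth client code, and submit_and_check is illustrative, not part of the snippet:

async def submit_and_check(conn, batch_request_bytes, batch_id):
    # Submit the serialized ClientBatchSubmitRequest, then block on status;
    # COMMITTED falls through, while INVALID/PENDING/UNKNOWN raise above.
    await conn.send(
        validator_pb2.Message.CLIENT_BATCH_SUBMIT_REQUEST,
        batch_request_bytes)
    await check_batch_status(conn, batch_id)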
Example 3
async def wait_for_status(batch_id, config):
    '''Poll the REST API until the batch status is no longer PENDING
       (i.e. COMMITTED or an error state). config.TIMEOUT is the total
       time to wait for a status, in seconds.
    '''
    headers = {'Content-Type': 'application/json'}
    status = ""
    start_time = time.time()
    wait = config.TIMEOUT
    timeout = aiohttp.ClientTimeout(total=config.TIMEOUT)
    waited = 0
    while waited < wait:
        async with aiohttp.ClientSession(timeout=timeout) as session:
            async with session.get(
                    f"http://{config.REST_API_URL}/batch_statuses?id={batch_id}",
                    headers=headers) as response:
                data = await response.read()
        try:
            data = load_json(data)
            status = data['data'][0]['status']
        except Exception as e:
            logging.error("Error in wait_for_status")
            logging.error(e)
            status = ""

        if status != 'PENDING':
            break
        await asyncio.sleep(0.5)
        waited = time.time() - start_time
        logging.info(f"Batch still PENDING, checking status again (status={status})")

    if status == "COMMITTED":
        logging.info("Transaction successfully submitted")
        return True
    elif status == "PENDING":
        logging.error("Transaction submitted but timed out")
        raise ApiInternalError("Transaction submitted but timed out")
    elif status == "UNKNOWN":
        logging.error("Something went wrong. Try again later")
        raise ApiInternalError("Something went wrong. Try again later")
    elif status == "INVALID":
        logging.error("Transaction submitted to blockchain is invalid")
        raise ApiInternalError("Transaction submitted to blockchain is invalid")
    else:
        try:
            message = data['data'][0]['message']
        except Exception:
            message = "unknown"
        logging.error("Error in the transaction {%s}" % message)
        raise ApiBadRequest("Error in the transaction {%s}" % message)
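The parsing above assumes the Sawtooth REST API /batch_statuses response shape; an abridged, illustrative payload:

sample = {
    "data": [
        {
            "id": "<batch_id>",
            "status": "COMMITTED",  # or PENDING / INVALID / UNKNOWN
            "invalid_transactions": []
        }
    ],
    "link": "http://rest-api:8008/batch_statuses?id=<batch_id>"
}
status = sample["data"][0]["status"]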
Example 4
    def is_receive_secrets(self):
        """
        Check that every address in receive_secret_addresses is a valid
        receive_secret address.
        """
        error_msg = f"{__name__} Not a receive_secret address"
        ##verify that every receive_secret address decodes to the right type
        for addr in self.receive_secret_addresses:
            if "RECEIVE_SECRET" != addresser.address_is(addr)[0]:
                raise ApiInternalError(error_msg)
Example 5
    async def store_receive_secrets(self, user_id, data):
        try:
            result = await db_secrets.store_data(self.app, self.table_name,
                                                 user_id, data)
        except Exception as e:
            msg = f"Storing receive secrets failed with an error {e}"
            logging.error(msg)
            raise ApiInternalError(msg)
        logging.info(f"Store receive secret successful with message {result}")
        return result
Example 6
    async def update_user_receive_secret(self, user_id, index):
        try:
            result = await db_secrets.update_array_with_index(
                self.app, self.user_table, user_id, self.array_name, index)

        except Exception as e:
            msg = f"Updating receive secret array of user failed with {e}"
            logging.error(msg)
            raise ApiInternalError(msg)
        logging.info(
            f"Updating receive secret array of user successful with {result}")
        return result
Example 7
    async def push_transaction(self, batch_list_bytes):
        try:
            async with aiohttp.ClientSession(timeout=self.timeout) as session:
                async with session.post(f"http://{self.rest_api_url}/batches",
                                        data=batch_list_bytes,
                                        headers=self.headers) as response:
                    data = await response.read()
        except Exception as e:
            logger.error(f"Blockchain REST API is unreachable: {e}")
            raise ApiInternalError("Blockchain REST API is unreachable")
        logger.info(f"Data returned after pushing the transaction to the blockchain: {data}")
        return data
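A hedged sketch of the attributes this method (and push_n_wait in Example 21) relies on; the concrete class is not shown in these snippets, so the names below are inferred from the bodies:

import aiohttp

class BlockchainClient:
    def __init__(self, rest_api_url, timeout_seconds=5):
        # Attributes assumed by push_transaction above.
        self.rest_api_url = rest_api_url
        self.timeout = aiohttp.ClientTimeout(total=timeout_seconds)
        self.headers = {"Content-Type": "application/octet-stream"}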
Example 8
async def get_s3_link(url):
    ## Fetch the object at the given (pre-signed) S3 URL.
    ## url (bytes): the URL, passed as bytes and decoded below.
    logging.info(f"URL received in get_s3_link {url}")
    async with aiohttp.ClientSession() as session:
        try:
            async with session.get(url.decode()) as request_response:
                data = await request_response.read()
        except Exception as e:
            logging.error(f"error {e} in {__file__} ")
            raise ApiInternalError("Error with s3 url")
    return data
Example 9
    async def update_array_with_value(self, user_id, value):
        try:
            result = await r.table(self.user_table)\
                .filter({"user_id": user_id})\
                .update({self.array_name: r.row[self.array_name].append(value)})\
                .run(self.app.config.DB)

        except Exception as e:
            msg = f"Updating {self.table_name} array of user failed with {e}"
            logging.error(msg)
            raise ApiInternalError(msg)
        logging.info(
            f"Updating {self.array_name} array of user successful with {result}")
        return result
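Illustrative effect of the ReQL append on a single document (field and value names are placeholders):

# before: {"user_id": "u1", "create_asset_idxs": [4, 9]}
# await update_array_with_value("u1", 12)
# after:  {"user_id": "u1", "create_asset_idxs": [4, 9, 12]}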
Example 10
async def gateway_scrypt_keys(app, password, num_keys, salt):
    async with aiohttp.ClientSession() as session:
        try:
            headers = {"x-api-key": app.config.API_GATEWAY_KEY}
            async with session.post(app.config.API_GATEWAY["SCRYPT_KEYS"],
                                    data=json.dumps({
                                        "password": password,
                                        "num_keys": num_keys,
                                        "salt": salt
                                    }),
                                    headers=headers) as request_response:
                data = await request_response.read()
        except Exception as e:
            logging.error(f"error {e} in {__file__} ")
            raise ApiInternalError("Error calling the scrypt keys gateway")
    return json.loads(data)
Example 11
async def send_transfer_asset(**in_data):
    inputs = [in_data["receiver_address"], in_data["issuer_address"]]
    outputs = [in_data["receiver_address"], in_data["issuer_address"]]

    transfer_asset = payload_pb2.CreateTransferAsset(
        key=in_data["key"],
        url=in_data["url"],
        time=in_data["time"],
        indiantime=in_data["indiantime"],
        file_name=in_data["file_name"],
        file_hash=in_data["file_hash"],
        master_key=in_data["master_key"],
        master_url=in_data["master_url"],
        expired_on=in_data["expired_on"],
        scope=in_data["scope"],
        receiver_address=in_data["receiver_address"],
        issuer_address=in_data["issuer_address"],
        issuer_pub=in_data["issuer_pub"],
        issuer_zero_pub=in_data["issuer_zero_pub"],
        signed_nonce=in_data["signed_nonce"],
        nonce=in_data["nonce"],
        issuer_child_zero_pub=in_data["issuer_child_zero_pub"],
    )

    payload = payload_pb2.TransactionPayload(
        payload_type=payload_pb2.TransactionPayload.TRANSFER_ASSET,
        transfer_asset=transfer_asset)

    transaction_ids, batches, batch_id, batch_list_bytes = make_header_and_batch(
        payload=payload,
        inputs=inputs,
        outputs=outputs,
        txn_key=in_data["txn_key"],
        batch_key=in_data["batch_key"])

    logging.info(f"This is the batch_id {batch_id}")

    rest_api_response = await messaging.send(batch_list_bytes,
                                             in_data["config"])

    try:
        result = await messaging.wait_for_status(batch_id, in_data["config"])
    except (ApiBadRequest, ApiInternalError) as err:
        #await auth_query.remove_auth_entry(request.app.config.DB_CONN, request.json.get('email'))
        logging.error(f"Transaction Failed with error {err}")
        raise ApiInternalError(err)
    return transaction_ids, batch_id
Example 12
    async def _generate_shared_secret_addr(self):
        """
        Generate shared_secret addresses. First fetch any shared_secret
        addresses that already exist.

        required = len(share_secret_addresses) - total_shares

        Four cases arise:
            1. The user wants more receivers than already exist;
               required will be negative.
            2. The user wants fewer receivers; required will be positive.
            3. The first time around there are no existing addresses
               (handled separately below).
            4. The number is unchanged; required is zero.
        """
        ##TODO: update share secret; for example when case 2 applies, all the
        ##share_secret_addresses that are no longer participating must be
        ##made inactive

        ##first time, when share_mnemonic transactions are being floated
        idxs = []
        if not self.share_secret_addresses:
            for _ in range(0, self.total_shares):
                idx = await generate_key_index(None)
                idxs.append(idx)
        else:
            required = len(self.share_secret_addresses) - self.total_shares
            #shared_idxs = self.org_state.get("shared_secret")
            if required < 0:
                ##more shares are wanted than addresses already exist;
                ##generate key indexes for the shortfall
                idxs = [await generate_key_index(None)
                        for _ in range(0, -required)]

                for addr in self.share_secret_addresses:
                    ins = await resolve_address.ResolveAddress(
                        addr, self.app.config.REST_API_URL)
                    idxs.append(ins.data["idx"])
            else:
                raise ApiInternalError(f"{__name__} The total shares are less \
                        than secret_addresses present, not implemented yet")
        ##resolve the requester's account to get his decrypted mnemonic

        ##Based on the collected indexes, derive the public/private key pairs
        ##corresponding to them from the mnemonic

        return await remote_calls.key_index_keys(self.app, self.user_mnemonic,
                                                 idxs)
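A worked illustration of the `required` arithmetic from the docstring (plain arithmetic; the numbers are made up):

existing = 3                        # len(self.share_secret_addresses) on chain
total_shares = 5                    # what the user now wants
required = existing - total_shares  # -2: two new key indexes are generated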
Example 13
    async def store(self, user_id, data):
        if not await find_on_key(self.app, "user_id", user_id):
            raise CustomError(
                f"This user account couldn't be found user_id <<{user_id}>>")

        try:
            result = await r.table(self.table_name)\
                .insert(data).run(self.app.config.DB)

        except ReqlNonExistenceError as e:
            msg = f"Storing {self.table_name} failed with an error {e}"
            logging.error(msg)
            raise ApiInternalError(msg)

        logging.info(
            f"Store {self.table_name} successful with message {result}")
        return result
Example 14
async def child_keys(url, mnemonic, child_key_index):

    async with aiohttp.ClientSession() as session:
        try:
            async with session.post(f"http://{url}/child_mnemonic_keys",
                    json={'mnemonic': mnemonic,
                          'child_key_index': child_key_index}) as request_response:
                data = await request_response.read()
        except Exception as e:
            logging.error(
                "Child mnemonic keys API is unreachable")
            raise ApiInternalError(
                "Child mnemonic keys API is unreachable")
    result = load_json(data)
    master_public_key = result["data"]["master_public_key"]
    child_public_key = result["data"]["child_public_key"]
    child_private_key = result["data"]["child_private_key"]
    return master_public_key, child_public_key, child_private_key
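Hypothetical call; the service URL, mnemonic, and index are placeholders:

mnemonic = "<bip39 mnemonic>"
master_pub, child_pub, child_priv = await child_keys(
    "keys-service:5000", mnemonic, child_key_index=7)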
Example 15
async def send(data, config):
    """Submit a serialized batch list to the blockchain REST API.

    The equivalent validator-socket implementation would be:

        batch_request = client_batch_submit_pb2.ClientBatchSubmitRequest()
        batch_request.batches.extend(batches)
        await conn.send(
            validator_pb2.Message.CLIENT_BATCH_SUBMIT_REQUEST,
            batch_request.SerializeToString(),
            timeout)
    """
    headers = {'Content-Type': 'application/octet-stream'}
    timeout = aiohttp.ClientTimeout(total=config.TIMEOUT)
    try:
        async with aiohttp.ClientSession(timeout=timeout) as session:
            async with session.post(f"http://{config.REST_API_URL}/batches", data=data, headers=headers) as response:
                data = await response.read()
    except Exception as e:
        logging.error(f"Blockchain REST API is unreachable: {e}")
        raise ApiInternalError("Blockchain REST API is unreachable")
    return data
Example 16
def ecdsa_signature_verify(public_key, signature, raw_message):
    secp_public = Secp256k1PublicKey.from_hex(public_key)
    #unhexlify signature

    try:
        signature = binascii.unhexlify(signature)
    except Exception as e:
        logging.error("Signatures are not in valid hex format")
        raise ApiInternalError(e)

    unserialized = secp_public.secp256k1_public_key.ecdsa_deserialize(signature)

    if isinstance(raw_message, int):
        raw_message = str(raw_message)

    if isinstance(raw_message, str):
        raw_message = raw_message.encode()

    return secp_public.secp256k1_public_key.ecdsa_verify(raw_message, unserialized)
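For symmetry, a hedged sketch of the signing side this verifier pairs with; the real signatures.ecdsa_signature helper is not shown in these snippets, so this only illustrates the hex round trip using the same secp256k1 objects:

import binascii
from sawtooth_signing.secp256k1 import Secp256k1PrivateKey

def ecdsa_signature(private_key_hex, raw_message):
    # Produce a hex-encoded serialized signature that
    # ecdsa_signature_verify() above can unhexlify and deserialize.
    secp_private = Secp256k1PrivateKey.from_hex(private_key_hex)
    if isinstance(raw_message, int):
        raw_message = str(raw_message)
    if isinstance(raw_message, str):
        raw_message = raw_message.encode()
    signature = secp_private.secp256k1_private_key.ecdsa_sign(raw_message)
    serialized = secp_private.secp256k1_private_key.ecdsa_serialize(signature)
    return binascii.hexlify(serialized)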
Example 17
async def generate_mnemonic(url):
    ## Ask the key service for a fresh mnemonic together with its
    ## master and zeroth key pairs.
    async with aiohttp.ClientSession() as session:
        try:
            async with session.get(
                    f"http://{url}/get_mnemonic") as request_response:
                data = await request_response.read()
        except Exception as e:
            logging.error(f"error {e} in {__file__} ")
            logging.error("Mnemonic API is unreachable")
            raise ApiInternalError("Mnemonic API is unreachable")

    request_json = load_json(data)

    keys = request_json["data"]
    return (keys["master_public_key"], keys["master_private_key"],
            keys["zeroth_public_key"], keys["zeroth_private_key"],
            keys["mnemonic"])
Example 18
async def send_receive_asset(**in_data):
    """Float a receive-asset transaction on the blockchain."""
    address = addresser.receive_asset_address(
        asset_id=in_data["txn_key"].get_public_key().as_hex(),
        index=in_data["idx"])

    inputs = [in_data["org_address"], address]
    outputs = [in_data["org_address"], address]
    logging.info(in_data)
    if in_data["child_zero_pub"]:

        child_address = addresser.child_account_address(
            account_id=in_data["child_zero_pub"], index=0)
        logging.info(f"CHILD address is {child_address}")
        inputs.append(child_address)
        outputs.append(child_address)

    if in_data["receive_asset_details"]:
        receive_asset_details = payload_pb2.ReceiveAssetDetails(
            name=in_data["receive_asset_details"]["name"],
            description=in_data["receive_asset_details"]["description"],
        )
    receive_asset = payload_pb2.CreateReceiveAsset(
        _id_=in_data["_id_"],
        time=in_data["time"],
        indiantime=in_data["indiantime"],
        idx=in_data["idx"],
        at_which_asset_expires=in_data["at_which_asset_expires"],
        org_name=in_data["org_name"],
        org_address=in_data["org_address"],
        org_role=in_data["org_role"],
        org_zero_pub=in_data["org_zero_pub"],
        receive_asset_details=receive_asset_details,
        child_zero_pub=in_data["child_zero_pub"],
        signed_nonce=in_data["signed_nonce"],
        nonce=in_data["nonce"],
        nonce_hash=in_data["nonce_hash"],
        unique_code_hash=in_data["unique_code_hash"],
        encrypted_unique_code=in_data["encrypted_unique_code"],
        encrypted_admin_unique_code=in_data["encrypted_admin_unique_code"])

    payload = payload_pb2.TransactionPayload(
        payload_type=payload_pb2.TransactionPayload.RECEIVE_ASSET,
        receive_asset=receive_asset)
    logging.info(payload)
    transaction_ids, batches, batch_id, batch_list_bytes = make_header_and_batch(
        payload=payload,
        inputs=inputs,
        outputs=outputs,
        txn_key=in_data["txn_key"],
        batch_key=in_data["batch_key"])

    logging.info(f"This is the batch_id {batch_id}")

    rest_api_response = await messaging.send(batch_list_bytes,
                                             in_data["config"])

    try:
        result = await messaging.wait_for_status(batch_id, in_data["config"])
    except (ApiBadRequest, ApiInternalError) as err:
        #await auth_query.remove_auth_entry(request.app.config.DB_CONN, request.json.get('email'))
        logging.error(f"Transaction failed with {err}")
        raise ApiInternalError(err)
        #raise err
    return transaction_ids, batch_id
Example 19
    def check_total_shares(self):
        if self.minimum_required < len(self.receive_secret_addresses):
            logger.success("Total shares condition fulfilled")
        else:
            raise ApiInternalError("The number of receivers must be greater \
            than the minimum required to reconstruct the secret")
Example 20
def base64decoding(file_bytes):
    try:
        return base64.b64decode(file_bytes)
    except Exception as e:
        raise ApiInternalError(e)
Example 21
    async def push_n_wait(self, batch_bytes, batch_id):
        rest_api_response = await self.push_transaction(batch_bytes)
        logger.info(f"push transaction result is {rest_api_response}")
        if not await self.wait_for_status(batch_id):
            raise ApiInternalError("The batch couldn't be submitted")
        return
Example 22
async def send_create_asset(**in_data):
    """
    Args
        key (str, hex-encoded): AES key encrypted with the user public key
                present at a random index
        url (str): s3 url encrypted with the user public key
        time (str): when this asset was created
        indiantime (str): time in Indian format
        file_name (str): file name
        file_hash (str): sha3_512 hash of the file content
        child_idx (int): random index
        parent_zero_pub (str): zeroth public key of the parent
        master_key (str): AES key encrypted with the key generated from
                        qci_public and the user private key
        master_url (str): s3 url encrypted with the AES key generated
                        from the user private key and the public key of QCI
        scope (Scope, defined in asset.proto)
        expired_on (str): the date on which this certificate expires
    """
    ##TODO: Processor side: float this asset and apply the change to
    ##create_asset_idxs on either the float_account address or the
    ##create_account address, depending on whether the user has been claimed
    inputs = [
        addresser.create_asset_address(
            asset_id=in_data["txn_key"].get_public_key().as_hex(),
            index=in_data["idx"]),
    ]

    outputs = [
        addresser.create_asset_address(
            asset_id=in_data["txn_key"].get_public_key().as_hex(),
            index=in_data["idx"])
    ]

    ##ideally, if the account is claimed, we should have nothing to do with the
    ##float account, but we send both addresses to the processor and let it
    ##handle the logic, i.e. float_account should exist and is_claimed shall be
    ##true to append create_asset_idxs to the account transaction
    if not in_data["is_acc_claimed"]:
        ##implies the user hasn't claimed his float_account_address, so
        ##create_asset_idxs will be changed on the float account address
        logging.info("Float account parent pub %s" %
                     in_data["flt_account_parent_pub"])
        logging.info("Float account parent idx %s" %
                     str(in_data["flt_account_parent_idx"]))
        float_account_address = addresser.float_account_address(
            account_id=in_data["flt_account_parent_pub"],
            index=in_data["flt_account_parent_idx"])
        inputs.append(float_account_address)
        outputs.append(float_account_address)
    else:
        account_address = addresser.create_organization_account_address(
            account_id=in_data["zero_pub"], index=0)
        inputs.append(account_address)
        outputs.append(account_address)

    if in_data["child_zero_pub"]:
        child_address = addresser.child_account_address(
            account_id=in_data["child_zero_pub"], index=0)
        inputs.append(child_address)
        outputs.append(child_address)

    if in_data["scope"]:
        scope = payload_pb2.PayloadScope(
            group=in_data["scope"]["group"],
            sub_group=in_data["scope"]["sub_group"],
            field=in_data["scope"]["field"],
            nature=in_data["scope"]["nature"],
            operations=in_data["scope"]["operations"],
            description=in_data["scope"]["description"],
        )
    else:
        scope = None

    logging.info(f"Input Address<<{inputs}>>")
    logging.info(f"Output Address<<{outputs}>>")

    asset = payload_pb2.CreateAsset(
        key=in_data["key"],
        url=in_data["url"],
        time=in_data["time"],
        indiantime=in_data["indiantime"],
        file_name=in_data["file_name"],
        file_hash=in_data["file_hash"],
        idx=in_data["idx"],
        master_key=in_data["master_key"],
        master_url=in_data["master_url"],
        role=in_data["role"],
        scope=scope,
        zero_pub=in_data["zero_pub"],
        flt_account_parent_pub=in_data["flt_account_parent_pub"],
        flt_account_parent_idx=in_data["flt_account_parent_idx"],
        child_zero_pub=in_data["child_zero_pub"])

    logging.info(f"Create asset transaction {asset}")
    payload = payload_pb2.TransactionPayload(
        payload_type=payload_pb2.TransactionPayload.CREATE_ASSET,
        create_asset=asset)

    transaction_ids, batches, batch_id, batch_list_bytes = make_header_and_batch(
        payload=payload,
        inputs=inputs,
        outputs=outputs,
        txn_key=in_data["txn_key"],
        batch_key=in_data["batch_key"])

    logging.info(f"This is the batch_id {batch_id}")

    rest_api_response = await messaging.send(batch_list_bytes,
                                             in_data["config"])

    try:
        result = await messaging.wait_for_status(batch_id, in_data["config"])
    except (ApiBadRequest, ApiInternalError) as err:
        #await auth_query.remove_auth_entry(request.app.config.DB_CONN, request.json.get('email'))
        logging.error(f"Transaction failed with {err}")
        raise ApiInternalError(err)
        #raise err
    return transaction_ids, batch_id
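Illustrative shape of the optional scope mapping consumed above (keys mirror payload_pb2.PayloadScope; all values are placeholders):

scope = {
    "group": "<group>",
    "sub_group": "<sub_group>",
    "field": "<field>",
    "nature": "<nature>",
    "operations": "<operations>",
    "description": "<description>",
}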
Example 23
async def submit_share_asset(app, requester, asset_address,
                             receive_asset_address, unique_code, revoked_on,
                             comments):
    """
        1. Check whether asset_address is a valid asset address.
        2. Check whether the asset is empty.
        3. Check whether the asset's ownership has been transferred to some
           other empty asset.
        4. Check whether the requester is the owner of this asset.
        5. Check whether receive_asset_address is a valid receive_asset address.
        6. Check whether at_which_asset_expires is still in the future.
        7. Check whether the SHA-224 hash of the unique code matches the one
           recorded on the receive asset.
    """

    f = await userapis.SolveAccount(requester, app)
    decrypted_mnemonic = f.decrypted_mnemonic
    org_state = f.org_state
    logging.info(f"THis is the decrypted mnemonic {decrypted_mnemonic}")
    share_asset_idxs = f.org_state.get("share_asset_idxs")
    child_user_id = f.child_user_id
    child_zero_pub = f.child_zero_pub
    account_zero_pub = f.zero_pub

    unique_code_hash = hashlib.sha224(str(unique_code).encode()).hexdigest()
    if await share_assets_query.find_share_asset(app, asset_address,
                                                 receive_asset_address):
        raise ApiInternalError("This shared asset has already been done")
    ##checking point 5
    receive_asset_instance = await userapis.SolveAddress(
        receive_asset_address, app.config.REST_API_URL)

    if receive_asset_instance.type != "RECEIVE_ASSET":
        raise AssetError("receive_asset_address is not a receive asset address")

    if not receive_asset_instance.data[
            "at_which_asset_expires"] > upload_utils.now_time_stamp():
        raise errors.InvalidValidityPeriod("The time to share an asset with \
                    this address has expired")

    if receive_asset_instance.data["unique_code_hash"] !=\
            unique_code_hash:
        raise AssetError(
            "Unique code provided is either wrong or meant for different receiver_address"
        )

    asset_instance = await userapis.SolveAddress(asset_address,
                                                 app.config.REST_API_URL)
    if asset_instance.type != "CREATE_ASSET":
        raise AssetError("asset_address is not an asset address")

    ##check point 2
    if not asset_instance.data["file_name"] or not asset_instance.data[
            "file_hash"]:
        raise AssetError("Empty assets cannot be shared")

    ##decrypting issuer mnemonic

    requester_account_address = addresser.create_organization_account_address(
        account_id=org_state["public"], index=0)

    ##Check whether the asset has been transferred to the issuer, i.e. assets
    ##that were not created by the issuer can't be transferred to other users
    if asset_instance.data.get("ownership_transfer"):
        message = f"An asset whose ownership has already been transferred to \
                {asset_instance.data.get('ownership_transfer')} cannot be shared"

        logging.error(message)
        raise AssetError(message)

    key_index = await ledger_utils.generate_key_index(share_asset_idxs)
    logging.info(f"THis is the key index for issuer {key_index}")

    ##at which the asset was created
    asset_index = asset_instance.data["idx"]

    nth_keys = await remote_calls.key_index_keys(app, decrypted_mnemonic,
                                                 [0, key_index, asset_index])

    ##account keys for the issuer
    requester_zeroth_priv, requester_zeroth_pub = \
                    nth_keys[str(0)]["private_key"], \
                        nth_keys[str(0)]["public_key"]

    ##keys at which the asset that needs to be shared was floated
    create_asset_priv, create_asset_pub = nth_keys[str(asset_index)]["private_key"], \
                            nth_keys[str(asset_index)]["public_key"]

    ##keys at which the shared asset index will be floated
    share_asset_priv, share_asset_pub = nth_keys[str(key_index)]["private_key"], \
                            nth_keys[str(key_index)]["public_key"]

    ##check that the issuer's nth public key is exactly the public key recorded
    ##in the asset transaction on the blockchain; this also checks that
    ##the requester is actually the owner of the asset
    if create_asset_pub != asset_instance.data.get("public"):
        logging.error("This asset address is not owned by the issuer")
        raise AssetError("This asset address is not owned by the issuer")

    ##decrypting file data stored on the issuer asset address; this can be
    ##done with the issuer private key present at the nth index
    data = await asset_utils.decrypt_file_data(
        asset_instance.data["key"], asset_instance.data["url"],
        asset_instance.data["file_hash"], create_asset_priv)

    ##TODO: check file_hash
    file_data = {
        "data": data,
        "file_name": asset_instance.data["file_name"],
        "file_hash": asset_instance.data["file_hash"]
    }

    ##encrypting again with the public key present at the receiver_asset_address
    key, encrypted_key, s3_url, encrypted_s3_url = \
        await asset_utils.encrypt_file_data(None, receive_asset_instance.data["public"], app.config,
            file_data)

    logging.info(f"This is the key {key} , encrypted_key{encrypted_key} \
                and the s3_url {s3_url}")
    master_key, master_url = await asset_utils.master_url_n_key(
        app.config.ADMIN_ZERO_PUB, key, s3_url)

    ##Now this transaction should be signed by the user

    create_asset_signer = ledger_utils.create_signer(share_asset_priv)

    ##for added security we will send a nonce signed by issuer account
    ##private key
    nonce = random.randint(2**20, 2**30)
    nonce_hash = hashlib.sha224(str(nonce).encode()).hexdigest()
    account_hex_signature = signatures.ecdsa_signature(requester_zeroth_priv,
                                                       nonce)

    ##nonce must also be signed with the private key at random index at which
    ##create asset is present
    asset_hex_signature = signatures.ecdsa_signature(create_asset_priv, nonce)

    transaction_data = {
        "config": app.config,
        "txn_key": create_asset_signer,
        "batch_key": app.config.SIGNER,
        "key": encrypted_key,
        "url": encrypted_s3_url,
        "master_key": master_key,
        "master_url": master_url,
        "time": int(time.time()),
        "indiantime": upload_utils.indian_time_stamp(),
        "file_name": asset_instance.data["file_name"],
        "file_hash": asset_instance.data["file_hash"],
        "original_asset_address": asset_address,
        "revoked_on": revoked_on,
        "comments": comments,
        "idx": key_index,
        "account_signature": account_hex_signature,
        "asset_signature": asset_hex_signature,
        "nonce": nonce,
        "nonce_hash": nonce_hash,
        "to_org_name": receive_asset_instance.data["org_name"],
        "to_org_address": receive_asset_instance.data["org_address"],
        "issuer_account_address": requester_account_address,
        "receive_asset_address": receive_asset_address,
        "child_zero_pub": child_zero_pub,
        "unique_code_hash": unique_code_hash,
    }

    transaction_ids, batch_id = await send_share_asset(**transaction_data)

    if transaction_ids:
        logging.info("Share Transaction has been created successfully")
        ##which implies the transaction has been submitted successfully;
        ##now all the changes required on the database can be done.
        ##The user's create_asset_idxs entry will be updated by whoever
        ##calls this, because the update can happen on pending_users
        share_asset_address = addresser.share_asset_address(
            share_asset_pub, key_index)
        account_signature = account_hex_signature.decode()
        asset_signature = asset_hex_signature.decode()
        transaction_data.update({
            "transaction_id": transaction_ids[0],
            "batch_id": batch_id,
            "account_signature": account_signature,
            "asset_signature": asset_signature,
            "address": share_asset_address
        })

        [
            transaction_data.pop(field)
            for field in ["config", "txn_key", "batch_key"]
        ]
        await share_assets_query.store_share_asset(app, transaction_data)

        ##now asset must be update with share_with key
        await assets_query.update_issuer_asset_shared(app, asset_address,
                                                      key_index)
        ##TODO: update receiver_address asset and issuer_asset_address in DB

        await accounts_query.update_share_asset_idxs(app, org_state["user_id"],
                                                     key_index)

        if child_user_id:
            await accounts_query.update_share_asset_idxs(
                app, child_user_id, key_index)

        return share_asset_address

    else:
        return False
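Check 7 from the docstring, isolated as a hedged standalone helper (mirrors how submit_share_asset hashes the presented code; the function name is illustrative):

import hashlib

def matches_unique_code(unique_code, recorded_hash):
    # The receive-asset transaction stores sha224(str(code)); the presenter
    # must reproduce exactly that digest.
    return hashlib.sha224(str(unique_code).encode()).hexdigest() == recorded_hash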
Example 24
    def check_minimum_req(self):
        if self.minimum_required >= 3:
            logger.success("Minimum requirement fulfilled")
        else:
            raise ApiInternalError(f"{__name__} To share passwords a minimum \
            of 3 users is required")
Example 25
async def send_share_asset(**in_data):

    inputs = [
        in_data["original_asset_address"],
        addresser.share_asset_address(
            in_data["txn_key"].get_public_key().as_hex(), in_data["idx"]),
        in_data["issuer_account_address"]  #issuer_account_address
    ]

    outputs = [
        in_data["original_asset_address"],
        addresser.share_asset_address(
            in_data["txn_key"].get_public_key().as_hex(), in_data["idx"]),
        in_data["issuer_account_address"]  #issuer_account_address
    ]

    if in_data["child_zero_pub"]:
        child_account_address = addresser.child_account_address(
            in_data["child_zero_pub"], 0)
        inputs.append(child_account_address)
        outputs.append(child_account_address)

    share_asset = payload_pb2.CreateShareAsset(
        key=in_data["key"],
        url=in_data["url"],
        master_key=in_data["master_key"],
        master_url=in_data["master_url"],
        time=in_data["time"],
        indiantime=in_data["indiantime"],
        file_name=in_data["file_name"],
        file_hash=in_data["file_hash"],
        original_asset_address=in_data["original_asset_address"],
        revoked_on=in_data["revoked_on"],
        #details=in_data["details"],
        idx=in_data["idx"],
        account_signature=in_data["account_signature"],
        asset_signature=in_data["asset_signature"],
        nonce=in_data["nonce"],
        nonce_hash=in_data["nonce_hash"],
        to_org_name=in_data["to_org_name"],
        to_org_address=in_data["to_org_address"],
        issuer_account_address=in_data["issuer_account_address"],
        receive_asset_address=in_data["receive_asset_address"],
        child_zero_pub=in_data["child_zero_pub"],
        unique_code_hash=in_data["unique_code_hash"],
    )
    logging.info(share_asset)

    payload = payload_pb2.TransactionPayload(
        payload_type=payload_pb2.TransactionPayload.SHARE_ASSET,
        share_asset=share_asset)

    transaction_ids, batches, batch_id, batch_list_bytes = make_header_and_batch(
        payload=payload,
        inputs=inputs,
        outputs=outputs,
        txn_key=in_data["txn_key"],
        batch_key=in_data["batch_key"])

    logging.info(f"This is the batch_id {batch_id}")

    rest_api_response = await messaging.send(batch_list_bytes,
                                             in_data["config"])

    try:
        result = await messaging.wait_for_status(batch_id, in_data["config"])
    except (ApiBadRequest, ApiInternalError) as err:
        #await auth_query.remove_auth_entry(request.app.config.DB_CONN, request.json.get('email'))
        logging.error(f"Transaction failed with {err}")
        raise ApiInternalError(err)
        #raise err
    return transaction_ids, batch_id