def create_child_account(**in_data):
    """Build a CREATE_CHILD_ACCOUNT payload and wrap it in a signed batch.

    Two state addresses are declared for both read and write: the parent
    organization account (derived from ``parent_zero_pub``) and the child
    account (derived from the transaction signer's public key).

    Returns:
        Whatever ``make_header_and_batch`` returns for this payload.
    """
    parent_address = addresser.create_organization_account_address(
        account_id=in_data["parent_zero_pub"], index=0)
    child_address = addresser.child_account_address(
        account_id=in_data["txn_key"].get_public_key().as_hex(), index=0)
    # The processor reads and writes the same two addresses.
    inputs = [parent_address, child_address]
    outputs = [parent_address, child_address]

    account = payload_pb2.CreateChildAccount(
        parent_zero_pub=in_data["parent_zero_pub"],
        parent_idx=in_data["parent_idx"],
        parent_role=in_data["parent_role"],
        org_name=in_data["org_name"],
        first_name=in_data["first_name"],
        last_name=in_data["last_name"],
        user_id=in_data["user_id"],
        pancard=in_data["pancard"],
        gst_number=in_data["gst_number"],
        tan_number=in_data["tan_number"],
        phone_number=in_data["phone_number"],
        email=in_data["email"],
        time=in_data["time"],
        indian_time=in_data["indian_time"],
        role=in_data["role"],
        deactivate=in_data["deactivate"],
        deactivate_on=in_data["deactivate_on"],
        nonce=in_data["nonce"],
        nonce_hash=in_data["nonce_hash"],
        signed_nonce=in_data["signed_nonce"],
    )
    logging.info(account)
    logging.info(f"THe address for the user on blockchain {inputs[0]}")

    payload = payload_pb2.TransactionPayload(
        payload_type=payload_pb2.TransactionPayload.CREATE_CHILD_ACCOUNT,
        create_child_account=account)
    logging.info(payload)
    return make_header_and_batch(payload=payload,
                                 inputs=inputs,
                                 outputs=outputs,
                                 txn_key=in_data["txn_key"],
                                 batch_key=in_data["batch_key"])
def get_organization_account(account):
    """Fetch and log the on-chain organization account for *account*.

    Looks the user up in the ``users`` table by email, derives the
    organization account address from their zeroth public key, then asks
    the local REST API for the state stored at that address and logs it.
    """
    # The login call is kept for its side effect even though the headers
    # are not needed by the subsequent GET.
    headers = get_headers_on_email(account["email"], account["password"])
    matches = ret.table("users").filter(
        ret.row["email"] == account["email"]).coerce_to("array").run(conn)
    user = matches[0]
    address = addresser.create_organization_account_address(
        user["acc_zero_pub"], index=0)
    response = requests.get("http://localhost:8000/accounts/address",
                            params={"address": address})
    logging.info(json.dumps(response.json()["data"], indent=4))
def set_org_float_account_idxs(self, public_key, key_index):
    """Record *key_index* in the organization's ``float_account_idxs``.

    Raises:
        InvalidTransaction: if the index was already recorded.
    """
    org = self.get_organization(public_key)
    # Guard against double-registration of the same derivation index.
    if key_index in org.float_account_idxs:
        raise InvalidTransaction(
            "Key index is already present in float_account_idxs")
    org.float_account_idxs.append(key_index)
    state_address = addresser.create_organization_account_address(
        account_id=public_key, index=0)
    self._context.set_state({state_address: org.SerializeToString()},
                            self._timeout)
    return
def set_organization_children(self, public_key, key_index):
    """Append *key_index* to the organization's ``child_account_idxs``.

    Args:
        public_key (str): zeroth public key identifying the organization.
        key_index (int): derivation index at which the child was created.

    Raises:
        InvalidTransaction: if *key_index* was already recorded.
    """
    account = self.get_organization(public_key)
    address = addresser.create_organization_account_address(
        account_id=public_key, index=0)
    # Re-adding the same index would corrupt key-derivation bookkeeping.
    if key_index in account.child_account_idxs:
        # FIX: the original message used a backslash line-continuation
        # inside the string literal, embedding stray whitespace in the
        # user-visible error text.
        raise InvalidTransaction(
            "Key index is already present in child_account_idxs")
    account.child_account_idxs.append(key_index)
    self._context.set_state({address: account.SerializeToString()},
                            self._timeout)
    # Lazy %-style args: the message is only built if INFO is enabled.
    logging.info(
        "Account at address %s and public_key %s appended with key index %s",
        address, public_key, key_index)
    return
def transfer_assets(self, payload, public):
    """Transfer ownership of an asset from the issuer to a receiver.

    Marks the issuer's asset as transferred (ownership_transfer /
    transferred_on) and copies the asset content fields onto the
    receiver's asset, recording where it came from (ownership_received,
    received_on, parent_address). Both assets are then written back to
    state.

    Args:
        payload: TransferAssets-style message carrying issuer/receiver
            addresses, file data fields, optional scope and timestamps.
        public: hex public key of the transaction signer (unused here —
            presumably validated by the caller; TODO confirm).
    """
    issuer_account_address = addresser.create_organization_account_address(
        account_id=payload.issuer_zero_pub, index=0)
    issuer_asset = self.asset_at_address(payload.issuer_address)
    # Issuer side: record who the asset went to and when.
    issuer_asset.ownership_transfer = payload.receiver_address
    issuer_asset.transferred_on = payload.indiantime
    receiver_asset = self.asset_at_address(payload.receiver_address)
    # Receiver side: copy the encrypted file material onto the
    # receiver's (previously empty) asset.
    receiver_asset.key = payload.key
    receiver_asset.url = payload.url
    receiver_asset.file_name = payload.file_name
    receiver_asset.file_hash = payload.file_hash
    receiver_asset.master_key = payload.master_key
    receiver_asset.master_url = payload.master_url
    receiver_asset.issuer_child_zero_pub = payload.issuer_child_zero_pub
    # NOTE(review): for proto3 singular message fields this truthiness
    # check may always be True — confirm payload.scope semantics.
    if payload.scope:
        receiver_asset.scope.group = payload.scope.group
        receiver_asset.scope.sub_group = payload.scope.sub_group
        receiver_asset.scope.field = payload.scope.field
        receiver_asset.scope.nature = payload.scope.nature
        receiver_asset.scope.operations = payload.scope.operations
        receiver_asset.scope.description = payload.scope.description
    """
    signature_list = []
    if payload.signatures:
        sig = create_empty_sig()
        for signature in payload.signatures:
            sig.ParseFromString(signature.encode())
            signature_list.append(sig)
        receiver_asset.authenticity_signatures = signature_list
    """
    receiver_asset.ownership_received = payload.issuer_address
    receiver_asset.received_on = payload.indiantime
    receiver_asset.parent_address = issuer_account_address
    # Persist both sides of the transfer.
    self._context.set_state(
        {payload.issuer_address: issuer_asset.SerializeToString()},
        self._timeout)
    self._context.set_state(
        {payload.receiver_address: receiver_asset.SerializeToString()},
        self._timeout)
    return
async def deserialize_float_account(REST_API_URL, address):
    """Fetch and decode the FloatAccount protobuf stored at *address*.

    Returns a dict of the protobuf fields (proto field names preserved)
    plus bookkeeping keys: ``address``, ``account_address`` (organization
    account of whoever claimed it, or None) and normalized ``claimed*`` /
    ``child_zero_pub`` entries.
    """
    raw_state = await address_state(REST_API_URL, address)
    # Decode the serialized protobuf pulled from state.
    message = float_account_pb2.FloatAccount()
    message.ParseFromString(raw_state)
    result = MessageToDict(message, preserving_proto_field_name=True)
    result.update({"address": address})
    # Accounts that were never claimed get explicit None placeholders.
    if result.get("claimed_by"):
        claimed_account = addresser.create_organization_account_address(
            result["claimed_by"], 0)
    else:
        claimed_account = None
        result.update({
            "claimed": None,
            "claimed_by": None,
            "claimed_on": None
        })
    result.update({"account_address": claimed_account})
    if not result.get("child_zero_pub"):
        result.update({"child_zero_pub": None})
    return result
def upload(issuer, receiver):
    """Upload a generated test asset to *receiver* on behalf of *issuer*.

    First posts against the receiver's float-account address; if the API
    reports an error (the float account was already claimed) the upload is
    retried against the receiver's organization-account address.

    Returns:
        tuple: (issuer_address, receiver_address) from the API response.
    """
    headers = get_headers_on_email(issuer["email"], issuer["password"])
    receiver = db_find_on_key_pending(receiver["email"])
    address = addresser.float_account_address(receiver["parent_pub"],
                                              receiver["parent_idx"])
    file_hash, base64_file_bytes, file_name = generate_file_like()
    data = {
        "file_name": file_name,
        "base64_file_bytes": base64_file_bytes,
        "file_hash": file_hash,
        "scope": create_scope(),
        "expired_on": revoke_time_stamp(days=100, hours=1, minutes=10),
        "address": address
    }
    r = requests.post("http://localhost:8000/assets/upload",
                      data=json.dumps(data),
                      headers=headers)
    logging.info(json.dumps(r.json(), indent=4))
    if r.json()["error"]:
        logging.info("Since this account has already been claimed")
        logging.info("Trying upload with organization account")
        # Retry against the claimed organization account instead.
        receiver = db_find_on_key(receiver["email"])
        data.update({
            "address": addresser.create_organization_account_address(
                receiver["acc_zero_pub"], 0)
        })
        r = requests.post("http://localhost:8000/assets/upload",
                          data=json.dumps(data),
                          headers=headers)
        logging.info(json.dumps(r.json(), indent=4))
    result = r.json()["data"]
    return result["issuer_address"], result["receiver_address"]
async def submit_share_asset(app, requester, asset_address,
                             receive_asset_address, unique_code, revoked_on,
                             comments):
    """Share an existing asset with the owner of a receive-asset address.

    Validations performed before the transaction is submitted:
        1. the (asset, receive-asset) pair has not already been shared
        2. ``receive_asset_address`` is a RECEIVE_ASSET address whose
           validity window has not expired
        3. the sha224 hash of ``unique_code`` matches the hash stored on
           the receive asset
        4. ``asset_address`` is a CREATE_ASSET address, is non-empty, has
           not been transferred away, and is owned by the requester

    On success the shared asset is stored in the database, the issuer's
    asset and account index arrays are updated, and the new share-asset
    address is returned; ``False`` is returned if submission failed.
    """
    f = await userapis.SolveAccount(requester, app)
    decrypted_mnemonic = f.decrypted_mnemonic
    org_state = f.org_state
    logging.info(f"THis is the decrypted mnemonic {decrypted_mnemonic}")
    share_asset_idxs = f.org_state.get("share_asset_idxs")
    child_user_id = f.child_user_id
    child_zero_pub = f.child_zero_pub
    unique_code_hash = hashlib.sha224(str(unique_code).encode()).hexdigest()

    if await share_assets_query.find_share_asset(app, asset_address,
                                                 receive_asset_address):
        raise ApiInternalError("This shared asset has already been done")

    ##checking point 5
    receive_asset_instance = await userapis.SolveAddress(
        receive_asset_address, app.config.REST_API_URL)
    if receive_asset_instance.type != "RECEIVE_ASSET":
        raise AssetError("receive_asset_address is notreceive asset address")
    if not receive_asset_instance.data[
            "at_which_asset_expires"] > upload_utils.now_time_stamp():
        raise errors.InvalidValidityPeriod(
            "The time to share asset with this address has been expired")
    if receive_asset_instance.data["unique_code_hash"] != unique_code_hash:
        raise AssetError(
            "Unique code provided is either wrong or meant for different receiver_address"
        )

    asset_instance = await userapis.SolveAddress(asset_address,
                                                 app.config.REST_API_URL)
    if asset_instance.type != "CREATE_ASSET":
        raise AssetError("asset_address is not asset address")
    ##check point 2
    if not asset_instance.data["file_name"] or \
            not asset_instance.data["file_hash"]:
        raise AssetError("Empty assets cannot be shared")

    requester_account_address = addresser.create_organization_account_address(
        account_id=org_state["public"], index=0)

    ##Check if the asset had been transferred to the issuer i.e assets which
    ##were not created by the issuer can't be shared onward.
    if asset_instance.data.get("ownership_transfer"):
        # BUGFIX: the original referenced an undefined name `issuer_asset`
        # here, which raised NameError instead of the intended AssetError.
        message = ("This asset which already have been transffered to "
                   f"{asset_instance.data.get('ownership_transfer')} "
                   "cannot be shared")
        logging.error(message)
        raise AssetError(message)

    key_index = await ledger_utils.generate_key_index(share_asset_idxs)
    logging.info(f"THis is the key index for issuer {key_index}")
    ##index at which the asset was created
    asset_index = asset_instance.data["idx"]
    nth_keys = await remote_calls.key_index_keys(
        app, decrypted_mnemonic, [0, key_index, asset_index])

    ##account keys for the issuer
    requester_zeroth_priv = nth_keys[str(0)]["private_key"]
    ##keys at which the asset that needs to be shared was floated
    create_asset_priv = nth_keys[str(asset_index)]["private_key"]
    create_asset_pub = nth_keys[str(asset_index)]["public_key"]
    ##keys at which the shared asset will be floated
    share_asset_priv = nth_keys[str(key_index)]["private_key"]
    share_asset_pub = nth_keys[str(key_index)]["public_key"]

    ##ownership check: the issuer's nth public key must match the public
    ##key recorded on the asset transaction.
    if create_asset_pub != asset_instance.data.get("public"):
        logging.error("This asset address is not owned by the issuer")
        raise AssetError("This asset address is not owned by the issuer")

    ##decrypt the file data stored on the issuer asset address with the
    ##issuer private key at the asset index
    data = await asset_utils.decrypt_file_data(
        asset_instance.data["key"], asset_instance.data["url"],
        asset_instance.data["file_hash"], create_asset_priv)
    ##TODO: check file_hash
    file_data = {
        "data": data,
        "file_name": asset_instance.data["file_name"],
        "file_hash": asset_instance.data["file_hash"]
    }
    ##re-encrypt with the public key present at receive_asset_address
    key, encrypted_key, s3_url, encrypted_s3_url = \
        await asset_utils.encrypt_file_data(
            None, receive_asset_instance.data["public"], app.config,
            file_data)
    logging.info(f"This is the key {key} , encrypted_key{encrypted_key} "
                 f"and the s3_url {s3_url}")
    master_key, master_url = await asset_utils.master_url_n_key(
        app.config.ADMIN_ZERO_PUB, key, s3_url)

    ##the transaction is signed by the share-asset key
    create_asset_signer = ledger_utils.create_signer(share_asset_priv)
    ##nonce signed by the issuer account private key, for added security
    nonce = random.randint(2**20, 2**30)
    nonce_hash = hashlib.sha224(str(nonce).encode()).hexdigest()
    account_hex_signature = signatures.ecdsa_signature(
        requester_zeroth_priv, nonce)
    ##nonce also signed with the private key at the asset's index
    asset_hex_signature = signatures.ecdsa_signature(create_asset_priv, nonce)

    transaction_data = {
        "config": app.config,
        "txn_key": create_asset_signer,
        "batch_key": app.config.SIGNER,
        "key": encrypted_key,
        "url": encrypted_s3_url,
        "master_key": master_key,
        "master_url": master_url,
        "time": int(time.time()),
        "indiantime": upload_utils.indian_time_stamp(),
        "file_name": asset_instance.data["file_name"],
        "file_hash": asset_instance.data["file_hash"],
        "original_asset_address": asset_address,
        "revoked_on": revoked_on,
        "comments": comments,
        "idx": key_index,
        "account_signature": account_hex_signature,
        "asset_signature": asset_hex_signature,
        "nonce": nonce,
        "nonce_hash": nonce_hash,
        "to_org_name": receive_asset_instance.data["org_name"],
        "to_org_address": receive_asset_instance.data["org_address"],
        "issuer_account_address": requester_account_address,
        "receive_asset_address": receive_asset_address,
        "child_zero_pub": child_zero_pub,
        "unique_code_hash": unique_code_hash,
    }
    transaction_ids, batch_id = await send_share_asset(**transaction_data)
    if not transaction_ids:
        return False
    logging.info("Share Transaction has been created successfully")
    ##transaction submitted successfully; persist everything to the DB.
    share_asset_address = addresser.share_asset_address(
        share_asset_pub, key_index)
    transaction_data.update({
        "transaction_id": transaction_ids[0],
        "batch_id": batch_id,
        "account_signature": account_hex_signature.decode(),
        "asset_signature": asset_hex_signature.decode(),
        "address": share_asset_address
    })
    # FIX: plain loop instead of a side-effect list comprehension.
    for field in ["config", "txn_key", "batch_key"]:
        transaction_data.pop(field)
    await share_assets_query.store_share_asset(app, transaction_data)
    ##the original asset must be updated with the shared_with key
    await assets_query.update_issuer_asset_shared(app, asset_address,
                                                  key_index)
    await accounts_query.update_share_asset_idxs(app, org_state["user_id"],
                                                 key_index)
    if child_user_id:
        await accounts_query.update_share_asset_idxs(app, child_user_id,
                                                     key_index)
    return share_asset_address
async def send_child_account(**in_data):
    """Submit a CREATE_CHILD_ACCOUNT transaction and wait for commit.

    Expected keys in ``in_data``:
        txn_key (sawtooth_signing.Signer): signer from the child key pair
        batch_key (sawtooth_signing.Signer): batch signer
        config: application config passed to the messaging helpers
        ...plus all CreateChildAccount protobuf fields set below.

    Returns:
        tuple: (transaction_ids, batch_id) once the batch is committed.

    Raises:
        ApiBadRequest, ApiInternalError: if the batch fails to commit.
    """
    parent_address = addresser.create_organization_account_address(
        account_id=in_data["parent_zero_pub"], index=0)
    child_address = addresser.child_account_address(
        account_id=in_data["txn_key"].get_public_key().as_hex(), index=0)
    # Both addresses are read and written by the processor.
    inputs = [parent_address, child_address]
    outputs = [parent_address, child_address]
    account = payload_pb2.CreateChildAccount(
        parent_zero_pub=in_data["parent_zero_pub"],
        parent_idx=in_data["parent_idx"],
        parent_role=in_data["parent_role"],
        org_name=in_data["org_name"],
        first_name=in_data["first_name"],
        last_name=in_data["last_name"],
        user_id=in_data["user_id"],
        pancard=in_data["pancard"],
        gst_number=in_data["gst_number"],
        tan_number=in_data["tan_number"],
        phone_number=in_data["phone_number"],
        email=in_data["email"],
        time=in_data["time"],
        indian_time=in_data["indian_time"],
        role=in_data["role"],
        deactivate=in_data["deactivate"],
        deactivate_on=in_data["deactivate_on"],
        nonce=in_data["nonce"],
        nonce_hash=in_data["nonce_hash"],
        signed_nonce=in_data["signed_nonce"],
    )
    logging.info(account)
    logging.info(f"THe address for the user on blockchain {inputs[0]}")
    payload = payload_pb2.TransactionPayload(
        payload_type=payload_pb2.TransactionPayload.CREATE_CHILD_ACCOUNT,
        create_child_account=account)
    logging.info(payload)
    transaction_ids, _batches, batch_id, batch_list_bytes = \
        make_header_and_batch(payload=payload,
                              inputs=inputs,
                              outputs=outputs,
                              txn_key=in_data["txn_key"],
                              batch_key=in_data["batch_key"])
    logging.info(f"This is the batch_id {batch_id}")
    await messaging.send(batch_list_bytes, in_data["config"])
    try:
        await messaging.wait_for_status(batch_id, in_data["config"])
    except (ApiBadRequest, ApiInternalError):
        #await auth_query.remove_auth_entry(request.app.config.DB_CONN, request.json.get('email'))
        # BUGFIX: the original re-raised and then had an unreachable
        # `return False, False`; the dead code has been removed and the
        # bare `raise` preserves the traceback.
        raise
    return transaction_ids, batch_id
def create_organization_account(**in_data):
    """Wrap a CreateOrganizationAccount txn in a batch and list.

    The signer's own organization-account address is always declared for
    read and write; for non-ADMIN roles the parent's float-account address
    is also declared (the processor marks it as claimed).

    Args:
        txn_key (sawtooth_signing.Signer): The Txn signer key pair.
        batch_key (sawtooth_signing.Signer): The Batch signer key pair.

    Returns:
        tuple: List of Batch, signature tuple
    """
    own_address = addresser.create_organization_account_address(
        account_id=in_data["txn_key"].get_public_key().as_hex(), index=0)
    inputs = [own_address]
    outputs = [own_address]
    if in_data["role"] != "ADMIN":
        # Non-admin accounts are claimed from a float account floated by
        # their parent, so that address must be writable as well.
        flt_address = addresser.float_account_address(
            account_id=in_data["parent_pub"], index=in_data["parent_idx"])
        inputs.append(flt_address)
        outputs.append(flt_address)
    if in_data.get("parent_pub"):
        logging.info(f"This is the parent pub {in_data['parent_pub']}")
    account = payload_pb2.CreateOrganizationAccount(
        role=in_data["role"],
        parent_role=in_data["parent_role"],
        phone_number=in_data["phone_number"],
        pancard=in_data["pancard"],
        user_id=in_data["user_id"],
        email=in_data["email"],
        org_name=in_data["org_name"],
        gst_number=in_data["gst_number"],
        tan_number=in_data["tan_number"],
        time=in_data["time"],
        indian_time=in_data["indian_time"],
        parent_zero_pub=in_data["parent_zero_pub"],
        deactivate=in_data["deactivate"],
        deactivate_on=in_data["deactivate_on"],
        create_asset_idxs=in_data["create_asset_idxs"],
        parent_pub=in_data["parent_pub"],
        parent_idx=in_data["parent_idx"],
        float_account_address=in_data["float_account_address"],
    )
    logging.info(account)
    logging.info(f"THe address for the user on blockchain {inputs[0]}")
    payload = payload_pb2.TransactionPayload(
        payload_type=payload_pb2.TransactionPayload.CREATE_ORGANIZATION_ACCOUNT,
        create_organization_account=account)
    logging.info(payload)
    return make_header_and_batch(payload=payload,
                                 inputs=inputs,
                                 outputs=outputs,
                                 txn_key=in_data["txn_key"],
                                 batch_key=in_data["batch_key"])
async def submit_child_account(app, parent_org, child):
    """Create a child account under *parent_org* on the blockchain.

    Steps, as implemented below:
        * decrypt the parent organization's mnemonic with the ADMIN key
        * fetch the parent's organization-account state from the chain
        * pick a fresh key index absent from ``child_account_idxs``
        * derive the signing key pair at that index; sign a nonce with the
          parent's zeroth private key
        * submit the CREATE_CHILD_ACCOUNT transaction; on success insert
          the child into the accounts table and record the new index on
          the parent organization.

    Returns:
        dict: *child*, augmented with transaction metadata on success.
    """
    decrypted_mnemonic = await ledger_utils.decrypted_user_mnemonic(
        app, parent_org["encrypted_admin_mnemonic"], parent_org["role"])
    logging.info(decrypted_mnemonic)
    org_address = addresser.create_organization_account_address(
        parent_org["acc_zero_pub"], 0)
    org_account = await deserialize_state.deserialize_org_account(
        app.config.REST_API_URL, org_address)
    logging.info(org_account)
    child_account_idxs = org_account.get("child_account_idxs")
    ##generates a new key index which does not yet exist in the array
    key_index = await ledger_utils.generate_key_index(child_account_idxs)
    logging.info(f"THis is the key index for parent {key_index}")
    nth_keys = await remote_calls.key_index_keys(app, decrypted_mnemonic,
                                                [key_index, 0])
    ##key pair at the fresh index — signs the transaction
    org_nth_priv, org_nth_pub = nth_keys[str(key_index)]["private_key"], \
        nth_keys[str(key_index)]["public_key"]
    ##zeroth key pair — signs the nonce
    org_zeroth_priv, org_zeroth_pub = nth_keys[str(0)]["private_key"], \
        nth_keys[str(0)]["public_key"]
    ##signer created from the parent key
    signer = upload_utils.create_signer(org_nth_priv)
    ##nonce signed by the zeroth private key, sent as proof of control
    nonce = random.randint(2**20, 2**31)
    nonce_hash = hashlib.sha224(str(nonce).encode()).hexdigest()
    hex_signatures = signatures.ecdsa_signature(org_zeroth_priv, nonce)
    transaction_data = {
        "config": app.config,
        "txn_key": signer,
        "batch_key": app.config.SIGNER,
        "parent_idx": key_index,
        "parent_zero_pub": org_zeroth_pub,
        "parent_role": parent_org["role"],
        "first_name": child["first_name"],
        "last_name": child["last_name"],
        "org_name": child["org_name"],
        "user_id": child["user_id"],
        "pancard": child["pancard"],
        "gst_number": child["gst_number"],
        "tan_number": child["tan_number"],
        "phone_number": child["phone_number"],
        "email": child["email"],
        "time": int(time.time()),
        "indian_time": upload_utils.indian_time_stamp(),
        "role": "CHILD",
        "deactivate": False,
        "deactivate_on": None,
        "nonce": nonce,
        "nonce_hash": nonce_hash,
        "signed_nonce": hex_signatures
    }
    transaction_ids, batch_id = await send_child_account(**transaction_data)
    logging.info(batch_id)
    if batch_id:
        ##if successful, insert this user in pending_users table
        child.update({
            "parent_idx": key_index,
            "public": org_nth_pub,
            "transaction_id": transaction_ids[0],
            "batch_id": batch_id,
            "parent_zero_pub": org_zeroth_pub,
            "parent_role": parent_org["role"],
            "nonce": nonce,
            "nonce_hash": nonce_hash,
            "signed_nonce": hex_signatures.decode(),
            "time": transaction_data["time"],
            "indian_time": transaction_data["indian_time"],
            "role": "CHILD",
            "deactivate": False,
            "deactivate_on": None,
        })
        logging.debug(child)
        await accounts_query.insert_account(app, child)
        ##update child_account_idxs array of the parent_org
        await accounts_query.update_child_account_idxs(
            app, parent_org["user_id"], key_index)
    return child
def create_float_account(**in_data):
    """Wrap a CreateFloatAccount txn in a batch and list.

    Declares the parent organization's account address and the new
    float-account address (derived from the signer's public key at
    ``parent_idx``) as both inputs and outputs; when the float account is
    created by a child account, that child address is included too.

    Returns:
        tuple: List of Batch, signature tuple
    """
    logging.info(f"THis is the data received in trsaction ceratrion {in_data}")
    parent_address = addresser.create_organization_account_address(
        account_id=in_data["parent_zero_pub"], index=0)
    flt_address = addresser.float_account_address(
        account_id=in_data["txn_key"].get_public_key().as_hex(),
        index=in_data["parent_idx"])
    inputs = [parent_address, flt_address]
    outputs = [parent_address, flt_address]
    logging.info(
        f"THe account address for the parent on blockchain {inputs[0]}")
    logging.info(f"THe float account address for the user {inputs[1]}")
    if in_data["child_zero_pub"]:
        # A child account floated this transaction; its address must be
        # declared as well so the processor can update it.
        child_address = addresser.child_account_address(
            account_id=in_data["child_zero_pub"], index=0)
        logging.info(f"CHILD address is {child_address}")
        inputs.append(child_address)
        outputs.append(child_address)
    logging.info(f"INPUTS ADDRESSES --<{inputs}>--")
    logging.info(f"OUTPUTS ADDRESSES --<{outputs}>--")
    float_account = payload_pb2.CreateFloatAccount(
        claimed_on=in_data["claimed_on"],
        org_name=in_data["org_name"],
        pancard=in_data["pancard"],
        gst_number=in_data["gst_number"],
        tan_number=in_data["tan_number"],
        phone_number=in_data["phone_number"],
        email=in_data["email"],
        claimed=in_data["claimed"],
        claimed_by=in_data["claimed_by"],
        create_asset_idxs=in_data["create_asset_idxs"],
        parent_idx=in_data["parent_idx"],
        time=in_data["time"],
        indian_time=in_data["indian_time"],
        parent_role=in_data["parent_role"],
        role=in_data["role"],
        parent_zero_pub=in_data["parent_zero_pub"],
        nonce=in_data["nonce"],
        nonce_hash=in_data["nonce_hash"],
        signed_nonce=in_data["signed_nonce"],
        child_zero_pub=in_data["child_zero_pub"])
    logging.info(float_account)
    logging.info(
        f"THe serialized protobuf for float_account is {float_account}")
    payload = payload_pb2.TransactionPayload(
        payload_type=payload_pb2.TransactionPayload.CREATE_FLOAT_ACCOUNT,
        create_float_account=float_account)
    return make_header_and_batch(payload=payload,
                                 inputs=inputs,
                                 outputs=outputs,
                                 txn_key=in_data["txn_key"],
                                 batch_key=in_data["batch_key"])
def set_asset(self, public, payload):
    """Write a new CreateAsset to state and index it on its owner account.

    Args:
        public (str): hex public key that signed the transaction; the new
            asset address is derived from it at ``payload.idx``.
        payload: deserialized CreateAsset payload.

    The asset index is appended to exactly one of:
        * the float account (when ``flt_account_parent_pub`` is set —
          i.e. the creator has not claimed an organization account), or
        * the organization account derived from ``payload.zero_pub``;
    and additionally to the child account when ``child_zero_pub`` is set.
    """
    logging.info("Payload in set_asset <<{}>>".format(payload))
    if payload.flt_account_parent_pub:
        # Unclaimed creator: index bookkeeping lives on the float account.
        account_address = addresser.float_account_address(
            account_id=payload.flt_account_parent_pub,
            index=payload.flt_account_parent_idx)
        logging.info("Updating create_asset_idxs in float_account\
            at {}".format(account_address))
        float_account = self.get_flt_account(
            public_key=payload.flt_account_parent_pub,
            index=payload.flt_account_parent_idx)
        self.update_asset_index(account_address, float_account, payload.idx)
    else:
        account_address = addresser.create_organization_account_address(
            account_id=payload.zero_pub, index=0)
        logging.info("Updating create_asset_idxs in \
            organization_account at {}".format(account_address))
        organization_account = self.get_organization(
            public_key=payload.zero_pub)
        self.update_asset_index(account_address, organization_account,
                                payload.idx)
    ##if this is present that means that this asset is being created by child
    ##of the organization, so the payload.idx needs to be appended to the
    ##create_asset_idxs array of the child too
    if payload.child_zero_pub:
        account_address = addresser.child_account_address(
            account_id=payload.child_zero_pub, index=0)
        child_account = self.get_child(payload.child_zero_pub, 0)
        self.update_asset_index(account_address, child_account, payload.idx)
    # Address of the new asset itself, derived from the signer's key.
    address = addresser.create_asset_address(asset_id=public,
                                             index=payload.idx)
    asset = create_empty_asset()
    asset.key = payload.key
    asset.url = payload.url
    asset.time = payload.time
    asset.indiantime = payload.indiantime
    asset.file_name = payload.file_name
    asset.file_hash = payload.file_hash
    asset.idx = payload.idx
    asset.master_key = payload.master_key
    asset.master_url = payload.master_url
    asset.expired_on = payload.expired_on
    asset.role = payload.role
    asset.public = public
    asset.child_zero_pub = payload.child_zero_pub
    # NOTE(review): for proto3 singular message fields this truthiness
    # check may always evaluate True — confirm payload.scope semantics.
    if payload.scope:
        asset.scope.group = payload.scope.group
        asset.scope.sub_group = payload.scope.sub_group
        asset.scope.field = payload.scope.field
        asset.scope.nature = payload.scope.nature
        asset.scope.operations = payload.scope.operations
        asset.scope.description = payload.scope.description
    logging.info(asset)
    logging.info("Account after serialization %s", asset.SerializeToString())
    return self._context.set_state({address: asset.SerializeToString()},
                                   self._timeout)
async def submit_receive_asset(app, requester, _id_, name, description,
                               at_which_asset_expires):
    """Float a RECEIVE_ASSET address another party can share an asset to.

    Derives a fresh key index from the requester's mnemonic, generates a
    one-time unique code (stored hashed on chain, encrypted for the user
    and the admin), submits the transaction, and on success persists the
    receive asset and updates the account's ``receive_asset_idxs``.

    Returns:
        tuple: (nth_pub, key_index, receive_asset_address) on success,
        ``None`` otherwise.
    """
    f = await userapis.SolveAccount(requester, app)
    decrypted_mnemonic = f.decrypted_mnemonic
    logging.info(f"THis is the decrypted mnemonic {decrypted_mnemonic}")
    org_db_entry = f.org_db
    receive_asset_idxs = f.org_state.get("receive_asset_idxs")
    child_user_id = f.child_user_id
    child_zero_pub = f.child_zero_pub
    account_zero_pub = f.zero_pub
    key_index = await ledger_utils.generate_key_index(array=receive_asset_idxs)
    nth_keys = await remote_calls.key_index_keys(app, decrypted_mnemonic,
                                                [key_index, 0])
    ##key pair at the fresh index signs the transaction
    nth_priv, nth_pub = nth_keys[str(key_index)]["private_key"], \
        nth_keys[str(key_index)]["public_key"]
    ##zeroth pair signs the nonce
    org_priv, org_pub = nth_keys[str(0)]["private_key"], \
        nth_keys[str(0)]["public_key"]
    org_account_address = addresser.create_organization_account_address(
        account_id=account_zero_pub, index=0)
    instance = await userapis.SolveAddress(org_account_address,
                                           app.config.REST_API_URL)
    org_state = instance.data
    ##the transaction will be signed by the user's nth private key; the
    ##child nth pub is recoverable from txn_key on the processor side
    create_asset_signer = ledger_utils.create_signer(nth_priv)
    ##for added security, send a nonce signed by the account private key
    nonce = random.randint(2**20, 2**30)
    nonce_hash = hashlib.sha224(str(nonce).encode()).hexdigest()
    hex_signatures = signatures.ecdsa_signature(org_priv, nonce)
    receive_asset_address = addresser.receive_asset_address(asset_id=nth_pub,
                                                            index=key_index)
    ##5-digit unique code; only its hash goes on chain
    unique_code = int("".join(map(str, random.choices(list(range(1, 10)),
                                                      k=5))))
    unique_code_hash = hashlib.sha224(str(unique_code).encode()).hexdigest()
    encrypted_unique_code = encryption_utils.encrypt_w_pubkey(
        str(unique_code).encode(), nth_pub)
    encrypted_admin_unique_code = encryption_utils.encrypt_w_pubkey(
        str(unique_code).encode(), app.config.ADMIN_ZERO_PUB)
    transaction_data = {
        "config": app.config,
        "txn_key": create_asset_signer,
        "batch_key": app.config.SIGNER,
        "_id_": _id_,
        "time": int(time.time()),
        "indiantime": upload_utils.indian_time_stamp(),
        "idx": key_index,
        "at_which_asset_expires": at_which_asset_expires,
        "org_name": org_state["org_name"],
        "org_address": org_account_address,
        "org_zero_pub": org_pub,
        "org_role": org_state["role"],
        "receive_asset_details": {
            "name": name,
            "description": description
        },
        "child_zero_pub": child_zero_pub,
        "signed_nonce": hex_signatures,
        "nonce": nonce,
        "nonce_hash": nonce_hash,
        "unique_code_hash": unique_code_hash,
        "encrypted_unique_code": encrypted_unique_code,
        "encrypted_admin_unique_code": encrypted_admin_unique_code
    }
    logging.info(f"THis is the transaction data in receive_asset")
    # BUGFIX: the original logged `pprint(transaction_data)` — pprint()
    # prints to stdout and returns None, so `None` was logged.
    logging.info(transaction_data)
    transaction_ids, batch_id = await send_receive_asset(**transaction_data)
    if batch_id:
        # FIX: plain loop instead of a side-effect list comprehension.
        for field in ["config", "txn_key", "batch_key"]:
            transaction_data.pop(field)
        signed_nonce = transaction_data["signed_nonce"].decode()
        transaction_data.update({
            "user_id": requester["user_id"],
            "public": nth_pub,
            "transaction_id": transaction_ids[0],
            "batch_id": batch_id,
            "signed_nonce": signed_nonce,
            "unique_code": unique_code
        })
        await receive_assets_query.store_receive_assets(app, transaction_data)
        await accounts_query.update_receive_assets_idxs(
            app, org_db_entry["user_id"], key_index)
        ##if created by a child of the organization, update the child
        ##account's receive_asset_idxs array as well
        if child_user_id:
            await accounts_query.update_receive_assets_idxs(
                app, child_user_id, key_index)
        #await accounts_query.update_create_asst_idxs_pending(app,
        #requester["user_id"], key_index)
        return nth_pub, key_index, receive_asset_address
    else:
        logging.error("Create asset Faied, GO to hell Dude!!!!,\
            Kabhi kabhi lagta hai ki bhagwan hone ka bhi kya fayda")
        return
def create_asset(**in_data):
    """Build a CREATE_ASSET transaction payload and wrap it in a batch.

    Inputs will have asset_address and account_address (the key index is
    appended to the account address). If the user only has a float account
    so far, the key index is appended to the float_account address instead.
    A child_account_address is added when the asset is created by a child.

    Args (via **in_data):
        txn_key: signer whose public key the asset address is derived from.
        idx (int): key index used in the asset address derivation.
        is_acc_claimed (bool): whether the user has claimed their account.
        flt_account_parent_pub / flt_account_parent_idx: locate the float
            account when the account is not yet claimed.
        zero_pub (str): organization zeroth public key (claimed accounts).
        child_zero_pub (str|None): child zeroth public key, if any.
        scope (dict|None): optional PayloadScope fields.
        key, url, time, indiantime, file_name, file_hash, master_key,
        master_url, role: copied verbatim into the CreateAsset message.
        batch_key: key used to sign the enclosing batch.

    Returns:
        Whatever make_header_and_batch returns for the built payload.
    """
    ##TODO: Processor side: float this asset and apply the create_asset_idxs
    ## change to either the float_account or account address depending on
    ## whether the user has been claimed or not.

    # The same asset address goes in both inputs and outputs; derive it once
    # (the original computed the identical address twice).
    asset_address = addresser.create_asset_address(
        asset_id=in_data["txn_key"].get_public_key().as_hex(),
        index=in_data["idx"])
    inputs = [asset_address]
    outputs = [asset_address]

    ## Ideally, if the account is claimed we should have nothing to do with
    ## the float account, but we send both addresses to the processor and let
    ## it enforce the logic (float_account must exist and is_claimed be true)
    ## before appending create_asset_idxs to the account transaction.
    if not in_data["is_acc_claimed"]:
        # User hasn't claimed their float account yet, so create_asset_idxs
        # will be changed on the float account address.
        # (This log call was previously fused into a comment and never ran.)
        logging.info("Float account parent pub %s"
                     % in_data["flt_account_parent_pub"])
        logging.info("Float account parent idx %s"
                     % str(in_data["flt_account_parent_idx"]))
        float_account_address = addresser.float_account_address(
            account_id=in_data["flt_account_parent_pub"],
            index=in_data["flt_account_parent_idx"])
        inputs.append(float_account_address)
        outputs.append(float_account_address)
    else:
        account_address = addresser.create_organization_account_address(
            account_id=in_data["zero_pub"], index=0)
        inputs.append(account_address)
        outputs.append(account_address)

    # Asset created by a child account: the child address is touched too.
    if in_data["child_zero_pub"]:
        child_address = addresser.child_account_address(
            account_id=in_data["child_zero_pub"], index=0)
        inputs.append(child_address)
        outputs.append(child_address)

    if in_data["scope"]:
        scope = payload_pb2.PayloadScope(
            group=in_data["scope"]["group"],
            sub_group=in_data["scope"]["sub_group"],
            field=in_data["scope"]["field"],
            nature=in_data["scope"]["nature"],
            operations=in_data["scope"]["operations"],
            description=in_data["scope"]["description"],
        )
    else:
        scope = None

    logging.info(f"Input Address<<{inputs}>>")
    logging.info(f"Output Address<<{outputs}>>")

    asset = payload_pb2.CreateAsset(
        key=in_data["key"],
        url=in_data["url"],
        time=in_data["time"],
        indiantime=in_data["indiantime"],
        file_name=in_data["file_name"],
        file_hash=in_data["file_hash"],
        idx=in_data["idx"],
        master_key=in_data["master_key"],
        master_url=in_data["master_url"],
        role=in_data["role"],
        scope=scope,
        zero_pub=in_data["zero_pub"],
        flt_account_parent_pub=in_data["flt_account_parent_pub"],
        flt_account_parent_idx=in_data["flt_account_parent_idx"],
        child_zero_pub=in_data["child_zero_pub"])

    logging.info(f"Create asset transaction {asset}")

    payload = payload_pb2.TransactionPayload(
        payload_type=payload_pb2.TransactionPayload.CREATE_ASSET,
        create_asset=asset)

    return make_header_and_batch(payload=payload,
                                 inputs=inputs,
                                 outputs=outputs,
                                 txn_key=in_data["txn_key"],
                                 batch_key=in_data["batch_key"])
async def send_create_asset(**in_data):
    """Build, sign, submit a CREATE_ASSET batch and wait for its status.

    Args (via **in_data):
        key (str), hex_encoded: encrypted AES key with user public key
            present at random index
        url (str): s3 url encrypted with user public key
        time (str): when this asset was created
        indiantime (str): time in Indian format
        file_name (str): file_name
        file_hash (str): sha3_512 hash of file content
        child_idx (int): random index
        parent_zero_pub (str): zero public key of the parent
        master_key (str): encrypted s3 url, encrypted with AES key generated
            with qci_public and user private key
        master_url (str): encrypted s3 url, encrypted with AES key generated
            with private key of user and public key of QCI
        scope (Scope, defined in asset.proto)
        expired_on: the date on which this certificate is intended to expire

    Returns:
        (transaction_ids, batch_id) once the batch status has been resolved.

    Raises:
        ApiInternalError: when waiting on the batch status fails.
    """
    ##TODO: Processor side: float this asset and apply the create_asset_idxs
    ## change to either the float_account or account address depending on
    ## whether the user has been claimed or not.
    inputs = [
        addresser.create_asset_address(
            asset_id=in_data["txn_key"].get_public_key().as_hex(),
            index=in_data["idx"]),
    ]

    outputs = [
        addresser.create_asset_address(
            asset_id=in_data["txn_key"].get_public_key().as_hex(),
            index=in_data["idx"])
    ]

    ## Ideally, if the account is claimed we should have nothing to do with
    ## the float account, but we send both addresses to the processor and let
    ## it enforce the logic (float_account must exist and is_claimed be true)
    ## before appending create_asset_idxs to the account transaction.
    if not in_data["is_acc_claimed"]:
        ## User hasn't claimed their float account yet, so the
        ## create_asset_idx will be changed on the float account address.
        logging.info("Float account parent pub %s"
                     % in_data["flt_account_parent_pub"])
        logging.info("Float account parent idx %s"
                     % str(in_data["flt_account_parent_idx"]))
        float_account_address = addresser.float_account_address(
            account_id=in_data["flt_account_parent_pub"],
            index=in_data["flt_account_parent_idx"])
        inputs.append(float_account_address)
        outputs.append(float_account_address)
    else:
        account_address = addresser.create_organization_account_address(
            account_id=in_data["zero_pub"], index=0)
        inputs.append(account_address)
        outputs.append(account_address)

    # Asset created by a child account: its address is read/written too.
    if in_data["child_zero_pub"]:
        child_address = addresser.child_account_address(
            account_id=in_data["child_zero_pub"], index=0)
        inputs.append(child_address)
        outputs.append(child_address)

    if in_data["scope"]:
        scope = payload_pb2.PayloadScope(
            group=in_data["scope"]["group"],
            sub_group=in_data["scope"]["sub_group"],
            field=in_data["scope"]["field"],
            nature=in_data["scope"]["nature"],
            operations=in_data["scope"]["operations"],
            description=in_data["scope"]["description"],
        )
    else:
        scope = None

    logging.info(f"Input Address<<{inputs}>>")
    logging.info(f"Output Address<<{outputs}>>")

    asset = payload_pb2.CreateAsset(
        key=in_data["key"],
        url=in_data["url"],
        time=in_data["time"],
        indiantime=in_data["indiantime"],
        file_name=in_data["file_name"],
        file_hash=in_data["file_hash"],
        idx=in_data["idx"],
        master_key=in_data["master_key"],
        master_url=in_data["master_url"],
        role=in_data["role"],
        scope=scope,
        zero_pub=in_data["zero_pub"],
        flt_account_parent_pub=in_data["flt_account_parent_pub"],
        flt_account_parent_idx=in_data["flt_account_parent_idx"],
        child_zero_pub=in_data["child_zero_pub"])

    logging.info(f"Create asset transaction {asset}")

    payload = payload_pb2.TransactionPayload(
        payload_type=payload_pb2.TransactionPayload.CREATE_ASSET,
        create_asset=asset)

    transaction_ids, batches, batch_id, batch_list_bytes = make_header_and_batch(
        payload=payload,
        inputs=inputs,
        outputs=outputs,
        txn_key=in_data["txn_key"],
        batch_key=in_data["batch_key"])

    logging.info(f"This is the batch_id {batch_id}")

    # Submit the serialized batch, then block until it reaches a terminal
    # status (committed/invalid/timeout).
    rest_api_response = await messaging.send(batch_list_bytes,
                                             in_data["config"])
    try:
        # NOTE(review): result is currently unused; only the absence of an
        # exception matters here.
        result = await messaging.wait_for_status(batch_id, in_data["config"])
    except (ApiBadRequest, ApiInternalError) as err:
        #await auth_query.remove_auth_entry(request.app.config.DB_CONN,
        #    request.json.get('email'))
        logging.error(f"Transaction failed with {err}")
        # Re-raise as ApiInternalError so callers see one failure type.
        raise ApiInternalError(err)
        #raise err
    return transaction_ids, batch_id
async def submit_float_account(app, requester, user):
    """Create a float account for *user* on behalf of *requester* and submit it.

    Retrieves the float_account index array from the parent: if the requester
    is a CHILD account, the parent organization's float_account_idxs must be
    used; if an organization is directly creating another org account, its own
    float_account_idxs are used.

    A nonce signed by the zeroth private key of the parent organization (or of
    the organization itself) is sent along, proving the float_account really
    comes from the concerned authority — otherwise anyone could generate random
    keys and submit a float transaction, since no other details are
    cross-checked.

    Returns:
        The updated *user* dict on success; implicitly None when the batch
        submission fails.
    """
    if requester["role"] == "CHILD":
        ## Find the parent public key of this child to reach the parent
        ## organization account; the child uses the parent org's
        ## float_account_idxs.
        org_address = addresser.create_organization_account_address(
            requester["parent_zero_pub"], 0)
        org_account = await deserialize_state.deserialize_org_account(
            app.config.REST_API_URL, org_address)
        logging.info(org_account)

        ## Check that the child really belongs to this parent organization.
        if requester["parent_idx"] not in org_account["child_account_idxs"]:
            raise Exception("Child parent_idx not in parent org child_account_idxs")
        if requester["org_name"] != org_account["org_name"]:
            raise Exception("Child org_name is different from parent")

        ## The child was created from the public key at parent_idx of the
        ## parent org mnemonic, so that key is needed to generate the child
        ## address: child_account_addresses derive from the parent org, not
        ## from the zeroth key of the child mnemonic.
        ##TODO: also check that the child address generated from the parent
        ## org key pair at requester's parent_idx matches the requester
        ## address.

        ## float_account_idxs array of the child's parent organisation
        flt_acc_idxs = org_account.get("float_account_idxs")

        ## Decrypt the parent mnemonic to obtain the key pair at the
        ## requested index.
        parent_id = org_account["user_id"]
        logging.info(f"Parent id for the child is {parent_id} and\
            float_account_idxs are {flt_acc_idxs}")
        org_db = await accounts_query.find_on_key(app, "user_id", parent_id)
        logging.info(org_db)
        if org_db["role"] != "ADMIN":
            decrypted_mnemonic = await ledger_utils.decrypted_user_mnemonic(
                app, org_db["encrypted_admin_mnemonic"], org_db["role"])
        else:
            # ADMIN mnemonic is held directly in app config, not encrypted
            # in the DB row.
            decrypted_mnemonic = app.config.ADMIN_MNEMONIC
        logging.info(decrypted_mnemonic)

        nth_keys = await remote_calls.key_index_keys(app, decrypted_mnemonic,
                                                     [requester["parent_idx"]])
        # NOTE(review): nth_priv/nth_pub are re-assigned below after the
        # branch; only child_zero_pub keeps this branch's nth_pub value.
        nth_priv, nth_pub = nth_keys[str(requester["parent_idx"])]["private_key"], \
            nth_keys[str(requester["parent_idx"])]["public_key"]

        zero_pub = org_db["acc_zero_pub"]
        parent_role = org_db["role"]
        child_zero_pub = nth_pub
    else:
        # Organisation itself is creating this float_account.
        logging.info(requester)

        ## float_account_idxs array of the organization itself
        flt_acc_idxs = await accounts_query.get_field(app,
                                                      requester["user_id"],
                                                      "float_account_idxs")
        flt_acc_idxs = flt_acc_idxs.get("float_account_idxs")
        logging.info(f"Float account indxs for the orgnization {flt_acc_idxs}")

        decrypted_mnemonic = await ledger_utils.decrypted_user_mnemonic(
            app, requester["encrypted_admin_mnemonic"], requester["role"])
        logging.info(decrypted_mnemonic)
        zero_pub = requester["acc_zero_pub"]
        parent_role = requester["role"]
        child_zero_pub = None

    logging.info(f"This is the decrypted mnemonic for parent {decrypted_mnemonic}")

    ## Generate a new key index that doesn't exist in flt_acc_idxs.
    key_index = await ledger_utils.generate_key_index(flt_acc_idxs)
    logging.info(f"THis is the key index for parent {key_index}")

    nth_keys = await remote_calls.key_index_keys(app, decrypted_mnemonic,
                                                 [key_index, 0])
    nth_priv, nth_pub = nth_keys[str(key_index)]["private_key"], \
        nth_keys[str(key_index)]["public_key"]

    ## Zeroth key pair, used below to sign the nonce.
    zeroth_priv, zeroth_pub = nth_keys[str(0)]["private_key"], \
        nth_keys[str(0)]["public_key"]

    flt_acc_address = addresser.float_account_address(nth_pub, key_index)
    logging.info(f"This is the flt acc addressfor user {flt_acc_address}")
    logging.info(f"Checking if valid account address has been generated\
        {addresser.address_is(flt_acc_address)}")

    ## Signer created from the parent key at key_index.
    flt_acc_signer=upload_utils.create_signer(nth_priv)

    ## Nonce signed by the zeroth private key, proving authority.
    nonce = random.randint(2**20, 2**31)
    nonce_hash = hashlib.sha224(str(nonce).encode()).hexdigest()
    hex_signatures = signatures.ecdsa_signature(zeroth_priv, nonce)

    ## Hash GST and TAN numbers when present; only digests go on chain.
    if user.get("gst_number"):
        gst_number = hashlib.sha224(user["gst_number"].encode()).hexdigest()
    else:
        gst_number = None

    if user.get("tan_number"):
        tan_number = hashlib.sha224(user["tan_number"]\
            .encode()).hexdigest()
    else:
        tan_number = None

    ##import from ledger.account import float_account
    transaction_data= {"config": app.config,
                       "txn_key": flt_acc_signer,
                       "batch_key": app.config.SIGNER,
                       "org_name": user["org_name"],
                       "pancard": hashlib.sha224(user["pancard"]\
                           .encode()).hexdigest(),
                       "gst_number": gst_number,
                       "tan_number": tan_number,
                       "phone_number": user["phone_number"],
                       "email": user["email"],
                       "claimed": False,
                       "claimed_by": None,
                       "create_asset_idxs": [],
                       "parent_pub": nth_pub,
                       "parent_idx": key_index,
                       "time": int(time.time()),
                       "indian_time": upload_utils.indian_time_stamp(),
                       "parent_zero_pub": zero_pub,
                       "parent_role": parent_role,
                       "role": user["role"],
                       "claimed_on": None,
                       "nonce": nonce,
                       "nonce_hash": nonce_hash,
                       "signed_nonce": hex_signatures,
                       "child_zero_pub": child_zero_pub
                       }

    transaction_ids, batch_id = await send_float_account(**transaction_data)

    if batch_id:
        logging.debug(user)
        ## On success, insert this user into the pending_users table.
        user.update({"parent_pub": nth_pub,
                     "parent_idx": key_index,
                     "time": transaction_data["time"],
                     "indian_time": transaction_data["indian_time"],
                     "parent_zero_pub": transaction_data["parent_zero_pub"],
                     "parent_role": transaction_data["parent_role"],
                     "transaction_id": transaction_ids[0],
                     "batch_id": batch_id,
                     "child_zero_pub": child_zero_pub,
                     })
        logging.debug(f"User after submitting float_Account trasaction {user}")
        await accounts_query.insert_pending_account(app, user)

        if requester["role"] == "CHILD":
            ## Update the parent org's float_account_idxs array...
            await accounts_query.update_flt_acc_idxs(app, org_db["user_id"],
                                                     key_index)
            ## ...and also the child's own float_account_idxs, so we know
            ## which child created which float_account index.
            await accounts_query.update_flt_acc_idxs(app, requester["user_id"],
                                                     key_index)
        else:
            await accounts_query.update_flt_acc_idxs(app, requester["user_id"],
                                                     key_index)

        ## Return the user whose float_account has just been created.
        ## NOTE(review): when batch_id is falsy this function implicitly
        ## returns None — callers must handle that.
        return user