class DappUserService(TestCase):
    """Integration test for the post-confirmation Cognito signup hook."""

    def setUp(self):
        self._repo = Repository(net_id=NETWORK_ID, NETWORKS=NETWORKS)

    @patch("common.utils.Utils.report_slack")
    def test_register_user_on_post_cognito_signup(self, report_slack):
        # Slack reporting is irrelevant to this test; silence it.
        report_slack.return_value = None
        signup_event = {
            'version': '1',
            'region': 'us-east-2',
            'userPoolId': 'us-east-2 tyuiop',
            'userName': '******',
            'callerContext': {
                'awsSdkVersion': 'aws-sdk-unknown-unknown',
                'clientId': 'dummy_client_id_1'
            },
            'triggerSource': 'PostConfirmation_ConfirmSignUp',
            'request': {
                'userAttributes': {
                    'sub': '23456789-d2cb-4388-9915-3456789',
                    'cognito:email_alias': '*****@*****.**',
                    'cognito:user_status': 'CONFIRMED',
                    'email_verified': 'true',
                    'nickname': 'Piyush',
                    'email': '*****@*****.**'
                }
            },
            'response': {}
        }
        # Cognito triggers must echo the incoming event back unchanged.
        result = register_user_post_aws_cognito_signup(signup_event, None)
        assert result == signup_event

    def tearDown(self):
        self._repo.execute("DELETE FROM user")
class TestUserService(TestCase):
    """Tests for UserService.delete_user."""

    def setUp(self):
        self.repo = Repository(net_id=NETWORK_ID, NETWORKS=NETWORKS)

    @patch("common.boto_utils.BotoUtils.invoke_lambda")
    def test_delete_user(self, mock_invoke_lambda):
        """Deleting a seeded user must remove its row from the user table."""
        user_service = UserService()
        username = "******"
        account_id = "123"
        name = "dummy_135"
        status = 1
        request_id = "id_123"
        current_time = datetime.utcnow()
        epoch_time = current_time.timestamp()
        self.repo.execute(
            "INSERT INTO user (username, account_id, name, email, email_verified, status, request_id, "
            "request_time_epoch, row_created, row_updated) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)",
            [
                username, account_id, name, username, 1, status, request_id,
                epoch_time, current_time, current_time
            ])
        mock_invoke_lambda.return_value = {"statusCode": 201, "body": ""}
        user_service.delete_user(username)
        user_data = self.repo.execute(
            "SELECT username FROM user WHERE username = %s", [username])
        # BUG FIX: the original asserted True on both branches of an
        # if/else, so the test could never fail.  After delete_user the
        # row must be gone.
        assert len(user_data) == 0

    def tearDown(self):
        self.repo.execute("DELETE FROM user")
class TestWalletAPI(unittest.TestCase):
    """API-level tests for the wallets lambda handler."""

    def setUp(self):
        self.NETWORKS_NAME = dict((NETWORKS[netId]["name"], netId)
                                  for netId in NETWORKS.keys())
        self.repo = Repository(net_id=NETWORK_ID, NETWORKS=NETWORKS)

    @patch("common.utils.Utils.report_slack")
    @patch("common.blockchain_util.BlockChainUtil.create_account")
    def test_create_wallet(self, mock_create_account, mock_report_slack):
        """POST /wallet should return the freshly created account details."""
        expected_address = "323449587122651441342932061624154600879572532581"
        expected_private_key = (
            "26561428888193216265620544717131876925191237116680314981303971688115990928499"
        )
        mock_create_account.return_value = (expected_address,
                                            expected_private_key)
        create_wallet_event = {
            "path": "/wallet",
            "httpMethod": "POST",
            "body": '{"username": "******"}',
        }
        response = lambda_handler.request_handler(create_wallet_event,
                                                  context=None)
        assert response["statusCode"] == 200
        body = json.loads(response["body"])
        assert body["status"] == "success"
        assert body["data"]["address"] == expected_address
        assert body["data"]["private_key"] == expected_private_key
        assert body["data"]["status"] == 0
        assert body["data"]["type"] == "GENERAL"

    @patch("common.utils.Utils.report_slack")
    def test_create_wallet_and_channel(self, mock_report_slack):
        pass

    @patch("common.utils.Utils.report_slack")
    def test_create_channel(self, mock_report_slack):
        pass

    @patch("common.utils.Utils.report_slack")
    def test_top_up_channel(self, mock_report_slack):
        pass

    @patch("common.utils.Utils.report_slack")
    def test_get_wallet_details(self, mock_report_slack):
        pass

    @patch("common.utils.Utils.report_slack")
    def test_register_wallets(self, mock_report_slack):
        pass

    @patch("common.utils.Utils.report_slack")
    def test_set_default_wallet(self, mock_report_slack):
        pass

    def tearDown(self):
        self.repo.execute("DELETE FROM wallet")
        self.repo.execute("DELETE FROM user_wallet")
def test_update_transaction_status(self, mock_report_slack):
    """Run the status-update handler and check cancelled orders were recorded."""
    response = request_handler(event={}, context=None)
    assert (response == "success")
    repo = Repository(net_id=NETWORK_ID, NETWORKS=NETWORKS)
    # NOTE(review): the params argument is a bare string rather than a
    # list/tuple; presumably Repository.execute tolerates a scalar param —
    # confirm against its implementation.
    query_response = repo.execute(
        "SELECT * FROM transaction_history WHERE status = %s",
        "ORDER_CANCELED")
    # NOTE(review): strictly greater-than-one looks odd for a single-run
    # fixture — confirm whether >= 1 was intended.
    assert (len(query_response) > 1)
class TestMPE(TestCase):
    """Tests for MPE.update_consumed_balance."""

    def setUp(self):
        self.NETWORKS_NAME = dict(
            (NETWORKS[netId]["name"], netId) for netId in NETWORKS.keys())
        self.repo = Repository(net_id=NETWORK_ID, NETWORKS=NETWORKS)
        self.mpe = MPE(self.repo)

    def test_update_consumed_balance(self):
        """Valid updates succeed; invalid amount/nonce combinations raise."""
        mpe_repo = MPERepository(self.repo)
        group_id = b"\xbc\xb0\xa1\x93Z\xa1\xab\x11\xfd\xbcX\x1c\x1cxZ\xdc.\xb6\xba\x8e\xc6\xc8C*\xd7\xa9\xea\x91\xe6'\xae\xfc"
        mpe_repo.create_channel({
            "channelId": 1,
            "sender": '0x123',
            "recipient": "0x345",
            "groupId": group_id,
            "amount": 100,
            "pending": 0,
            "nonce": 0,
            "expiration": 8396357,
            "signer": '0x987',
            "consumed_balance": 12
        })
        mpe_repo.create_channel({
            "channelId": 2,
            "sender": '0x3432',
            "recipient": "0x5453",
            "groupId": group_id,
            "amount": 100,
            "pending": 0,
            "nonce": 1,
            "expiration": 8396357,
            "signer": '0x987',
            "consumed_balance": 3
        })
        self.assertDictEqual({},
                             self.mpe.update_consumed_balance(1, 13, 100, 0))
        # BUG FIX: the original wrapped these calls in try/except blocks
        # that asserted True either way, so the test passed even when no
        # error was raised.  assertRaises makes the expected failure
        # explicit.
        with self.assertRaises(Exception):
            self.mpe.update_consumed_balance(2, 2, 100, 1)
        with self.assertRaises(Exception):
            self.mpe.update_consumed_balance(2, 4, 100, 0)
        with self.assertRaises(Exception):
            self.mpe.update_consumed_balance(2, 4, 80, 1)

    def tearDown(self):
        self.repo.execute("DELETE FROM `mpe_channel`")
class RFAIFundRequestEventConsumer(RFAIEventConsumer):
    """Consumes RFAI FundRequest events and records the staker's stake."""

    _connection = Repository(NETWORKS=NETWORK)
    _rfai_request_repository = RequestDAO(_connection)
    _stake_dao_repository = StakeDAO(_connection)

    def __init__(self, net_id, ws_provider, ipfs_url, ipfs_port):
        super().__init__(net_id, ws_provider, ipfs_url, ipfs_port)

    def on_event(self, event):
        """Persist a stake row for the staker/amount carried by the event."""
        event_data = self._get_event_data(event)
        request_id = event_data['requestId']
        staker = event_data['staker']
        amount = event_data['amount']
        # Only document_uri from the on-chain request is used below; the
        # remaining unpacked fields are unused here.
        [
            found, request_id, requester, total_fund, document_uri, expiration,
            end_submission, end_evaluation, status, stake_members, submitters
        ] = self._get_rfai_service_request_by_id(request_id)
        metadata_hash = self._get_metadata_hash(document_uri)
        # SECURITY: eval() on IPFS-sourced metadata executes arbitrary
        # expressions from external data — should be ast.literal_eval or
        # json.loads.
        rfai_metadata = eval(self._get_rfai_metadata_from_ipfs(metadata_hash))
        created_at = rfai_metadata['created']
        # from where we will get claim back amount
        self._stake_dao_repository.create_stake(
            request_id, staker, amount, 0, event["data"]["transactionHash"],
            created_at)
class RFAIApproveRequestEventConsumer(RFAIEventConsumer):
    """Applies RFAI ApproveRequest events to the stored request row."""

    _connection = Repository(NETWORKS=NETWORK)
    _request_dao = RequestDAO(_connection)

    def __init__(self, net_id, ws_provider, ipfs_url, ipfs_port):
        super().__init__(net_id, ws_provider, ipfs_url, ipfs_port)

    def on_event(self, event):
        """Mark the request APPROVED and copy the event's window fields."""
        # need to change this whenever we clean up it should nto be tied with db column name
        event_data = self._get_event_data(event)
        update_params = {
            "status": RFAIStatusCodes.APPROVED.value,
            "request_actor": event_data['approver'],
            "end_submission": event_data['endSubmission'],
            "end_evaluation": event_data['endEvaluation'],
            "expiration": event_data['expiration']
        }
        self._request_dao.update_request_for_given_request_id(
            event_data['requestId'], update_params)
class RFAIVoteRequestEventConsumer(RFAIEventConsumer):
    """Records a vote against the solution submitted on an RFAI request,
    stamped with the originating block's timestamp."""

    _connection = Repository(NETWORKS=NETWORK)
    _rfai_vote_repository = VoteDAO(_connection)
    _rfai_solution_repository = SolutionDAO(_connection)

    def __init__(self, net_id, ws_provider, ipfs_url, ipfs_port):
        super().__init__(net_id, ws_provider, ipfs_url, ipfs_port)

    def on_event(self, event):
        """Resolve the voted solution and upsert the vote row."""
        created_at_in_epoch = self._blockchain_util.get_created_at_for_block(
            block_no=event["data"]["block_no"]).get("timestamp", None)
        # BUG FIX: created_at was only assigned when a block timestamp was
        # found, so a missing timestamp surfaced later as an opaque
        # UnboundLocalError.  Fail fast with a clear error instead.
        if created_at_in_epoch is None:
            raise ValueError(f"No block timestamp found for event {event}")
        created_at = time.strftime('%Y-%m-%d %H:%M:%S',
                                   time.localtime(created_at_in_epoch))
        event_data = self._get_event_data(event)
        request_id = event_data['requestId']
        solution_data = self._rfai_solution_repository.get_solution_for_given_submitter_and_request_id(
            request_id=request_id, submitter=event_data["submitter"])
        self._create_or_update_vote(
            request_id=request_id,
            voter=event_data["voter"],
            rfai_solution_id=solution_data["rfai_solution_id"],
            created_at=created_at)

    def _create_or_update_vote(self, request_id, voter, rfai_solution_id,
                               created_at):
        """Upsert the vote row keyed by request, voter and solution."""
        self._rfai_vote_repository.create_or_update_vote(
            request_id=request_id,
            voter=voter,
            rfai_solution_id=rfai_solution_id,
            created_at=created_at)
class RFAICloseRequestEventConsumer(RFAIEventConsumer):
    """Marks an RFAI request CLOSED when a close event arrives."""

    _connection = Repository(NETWORKS=NETWORK)
    _request_dao = RequestDAO(_connection)

    def __init__(self, net_id, ws_provider, ipfs_url, ipfs_port):
        super().__init__(net_id, ws_provider, ipfs_url, ipfs_port)

    def on_event(self, event):
        """Validate the request on chain, then set status/actor in the DB."""
        event_data = self._get_event_data(event)
        request_id = event_data['requestId']
        [
            found, request_id, requester, total_fund, document_uri, expiration,
            end_submission, end_evaluation, status, stake_members, submitters
        ] = self._get_rfai_service_request_by_id(request_id)
        self._update_rfai_request_status_and_actor(
            request_id=request_id,
            status=RFAIStatusCodes.CLOSED.value,
            request_actor=event_data["actor"])

    def _update_rfai_request_status_and_actor(self, request_id, status,
                                              request_actor):
        """Write the new status and the acting address for the request."""
        update_params = {"status": status, "request_actor": request_actor}
        self._request_dao.update_request_for_given_request_id(
            request_id=request_id, update_parameters=update_params)
class RFAIClaimBackRequestEventConsumer(RFAIEventConsumer):
    """Handles RFAI ClaimBack events inside a single DB transaction."""

    _connection = Repository(NETWORKS=NETWORK)
    _request_dao = RequestDAO(_connection)
    _stake_dao = StakeDAO(_connection)

    def __init__(self, net_id, ws_provider, ipfs_url, ipfs_port):
        super().__init__(net_id, ws_provider, ipfs_url, ipfs_port)

    def on_event(self, event):
        """Refresh the request's total fund and zero claim-back balances."""
        event_data = self._get_event_data(event)
        request_id = event_data['requestId']
        [
            found, request_id, requester, total_fund, document_uri, expiration,
            end_submission, end_evaluation, status, stake_members, submitters
        ] = self._get_rfai_service_request_by_id(request_id)
        claim_back_amount = event_data["amount"]
        # FIX: removed an unused DB read (`request_data`) that was fetched
        # and never referenced.
        # NOTE(review): claim_back_amount is read from the event but never
        # written — the stake update below hard-codes 0.  Confirm intended.
        self._connection.begin_transaction()
        try:
            self._request_dao.update_request_for_given_request_id(
                request_id=request_id,
                update_parameters={"fund_total": total_fund})
            self._stake_dao.update_stake_for_given_request_id(
                request_id=request_id,
                update_parameters={"claim_back_amount": 0})
            self._connection.commit_transaction()
        except Exception as e:
            logger.info(
                f"Transaction Rollback for event {event}. Error::{repr(e)}")
            self._connection.rollback_transaction()
            raise e
class MPEEventConsumer(EventConsumer):
    """Consumes MPE channel events and mirrors channel state into the DB."""

    _mpe_repository = MPERepository(Repository(NETWORK_ID, NETWORKS=NETWORKS))

    def __init__(self, ws_provider):
        self.blockchain_util = BlockChainUtil("WS_PROVIDER", ws_provider)

    def on_event(self, event):
        """Create the channel on ChannelOpen; otherwise re-sync from chain."""
        net_id = NETWORK_ID
        base_contract_path = os.path.abspath(
            os.path.join(os.path.dirname(__file__), '..', '..',
                         'node_modules', 'singularitynet-platform-contracts'))
        mpe_contract = self.blockchain_util.get_contract_instance(
            base_contract_path, "MPE", net_id)
        logger.info(f"processing mpe event {event}")
        event_name = event["name"]
        event_data = event["data"]
        # SECURITY: eval() on the raw event payload executes arbitrary
        # expressions — should be ast.literal_eval.
        mpe_data = eval(event_data['json_str'])
        channel_id = int(mpe_data['channelId'])
        if event_name == 'ChannelOpen':
            self._mpe_repository.create_channel(mpe_data)
        else:
            # For any other channel event, read the authoritative channel
            # state back from the contract and update the stored row.
            channel_data = mpe_contract.functions.channels(channel_id).call()
            group_id = base64.b64encode(channel_data[4]).decode('utf8')
            self._mpe_repository.update_channel(channel_id=channel_id,
                                                group_id=group_id,
                                                channel_data=channel_data)
class RFAIAddSolutionRequestEventConsumer(RFAIEventConsumer):
    """Upserts a submitted solution, stamped with the block's timestamp."""

    _connection = Repository(NETWORKS=NETWORK)
    _rfai_solution_repository = SolutionDAO(_connection)

    def __init__(self, net_id, ws_provider, ipfs_url, ipfs_port):
        super().__init__(net_id, ws_provider, ipfs_url, ipfs_port)

    def on_event(self, event):
        """Validate the request on chain and upsert the solution row."""
        created_at_in_epoch = self._blockchain_util.get_created_at_for_block(
            block_no=event["data"]["block_no"]).get("timestamp", None)
        # BUG FIX: created_at was unassigned when the block timestamp was
        # missing, producing an UnboundLocalError below; fail fast instead.
        if created_at_in_epoch is None:
            raise ValueError(f"No block timestamp found for event {event}")
        created_at = time.strftime('%Y-%m-%d %H:%M:%S',
                                   time.localtime(created_at_in_epoch))
        event_data = self._get_event_data(event)
        request_id = event_data['requestId']
        # FIX: the request details were fetched twice (first into an unused
        # `result`); a single call is enough.
        [
            found, request_id, requester, total_fund, document_uri, expiration,
            end_submission, end_evaluation, status, stake_members, submitters
        ] = self._get_rfai_service_request_by_id(request_id)
        self._rfai_solution_repository.create_or_update_solution(
            request_id=request_id,
            submitter=event_data["submitter"],
            doc_uri=event_data["solutionDocURI"],
            claim_amount=0,
            created_at=created_at)
class ServiceEventConsumer(EventConsumer):
    """Base consumer for Registry service events: decodes event payloads and
    fetches service details from the Registry contract."""

    _connection = Repository(NETWORK_ID, NETWORKS=NETWORKS)
    _service_repository = ServiceRepository(_connection)

    def __init__(self, ws_provider, ipfs_url, ipfs_port):
        self._blockchain_util = BlockChainUtil("WS_PROVIDER", ws_provider)
        self._s3_util = S3Util(S3_BUCKET_ACCESS_KEY, S3_BUCKET_SECRET_KEY)
        self._ipfs_util = IPFSUtil(ipfs_url, ipfs_port)

    def on_event(self, event):
        # Subclasses implement the actual event handling.
        pass

    def _fetch_tags(self, registry_contract, org_id_hex, service_id_hex):
        """Read the service registration record (incl. tags) from the Registry."""
        tags_data = registry_contract.functions.getServiceRegistrationById(
            org_id_hex, service_id_hex).call()
        return tags_data

    def _get_org_id_from_event(self, event):
        """Decode the bytes orgId in the event payload to a string."""
        event_data = event['data']
        # SECURITY: eval() on the raw event payload executes arbitrary
        # expressions — should be ast.literal_eval.
        service_data = eval(event_data['json_str'])
        org_id_bytes = service_data['orgId']
        org_id = Web3.toText(org_id_bytes).rstrip("\x00")
        return org_id

    def _get_service_id_from_event(self, event):
        """Decode the bytes serviceId in the event payload to a string."""
        event_data = event['data']
        service_data = eval(event_data['json_str'])
        service_id_bytes = service_data['serviceId']
        service_id = Web3.toText(service_id_bytes).rstrip("\x00")
        return service_id

    def _get_metadata_uri_from_event(self, event):
        """Decode metadataURI, dropping a 7-char prefix and NUL padding.

        NOTE(review): the [7:] slice presumably strips an 'ipfs://' scheme
        prefix — confirm against the on-chain metadataURI format.
        """
        event_data = event['data']
        service_data = eval(event_data['json_str'])
        metadata_uri = Web3.toText(
            service_data['metadataURI'])[7:].rstrip("\u0000")
        return metadata_uri

    def _get_registry_contract(self):
        """Build a Registry contract instance from the bundled platform contracts."""
        net_id = NETWORK_ID
        base_contract_path = os.path.abspath(
            os.path.join(os.path.dirname(__file__), '..', '..',
                         'node_modules', 'singularitynet-platform-contracts'))
        registry_contract = self._blockchain_util.get_contract_instance(
            base_contract_path, "REGISTRY", net_id)
        return registry_contract

    def _get_service_details_from_blockchain(self, event):
        """Return (org_id, service_id, tags_data) for the event's service."""
        logger.info(f"processing service event {event}")
        registry_contract = self._get_registry_contract()
        org_id = self._get_org_id_from_event(event)
        service_id = self._get_service_id_from_event(event)
        tags_data = self._fetch_tags(registry_contract=registry_contract,
                                     org_id_hex=org_id.encode("utf-8"),
                                     service_id_hex=service_id.encode("utf-8"))
        return org_id, service_id, tags_data
class RFAIFundRequestEventConsumer(RFAIEventConsumer):
    """Applies an RFAI FundRequest event: upserts the staker's stake, records
    the funding transaction, and refreshes the request's fund totals, all in
    one DB transaction."""

    _connection = Repository(NETWORKS=NETWORK)
    _request_dao = RequestDAO(_connection)
    _stake_dao = StakeDAO(_connection)
    _fund_request_trxn_dao = FundRequestTransactionDAO(_connection)

    def __init__(self, net_id, ws_provider, ipfs_url, ipfs_port):
        super().__init__(net_id, ws_provider, ipfs_url, ipfs_port)

    def on_event(self, event):
        """Persist the stake/transaction/fund updates for a funding event."""
        created_at_in_epoch = self._blockchain_util.get_created_at_for_block(
            block_no=event["data"]["block_no"]).get("timestamp", None)
        # BUG FIX: created_at was only assigned when a block timestamp was
        # present, so a missing timestamp surfaced later as an
        # UnboundLocalError.  Fail fast with a clear error instead.
        if created_at_in_epoch is None:
            raise ValueError(f"No block timestamp found for event {event}")
        created_at = time.strftime('%Y-%m-%d %H:%M:%S',
                                   time.localtime(created_at_in_epoch))
        event_data = self._get_event_data(event)
        request_id = event_data['requestId']
        staker = event_data['staker']
        funded_amount = event_data['amount']
        [
            found, request_id, requester, total_fund, document_uri, expiration,
            end_submission, end_evaluation, status, stake_members, submitters
        ] = self._get_rfai_service_request_by_id(request_id)
        found, stake_amount = self._get_stake_by_id(request_id=request_id,
                                                    staker=staker)
        if not found:
            stake_amount = 0
        request_fund = self._request_dao.get_request_data_for_given_requester_and_status(
            filter_parameter={"request_id": request_id
                              })[0]["request_fund"] + funded_amount
        self._connection.begin_transaction()
        try:
            self._stake_dao.create_or_update_stake(
                request_id=request_id,
                stake_member=staker,
                stake_amount=0,
                claim_back_amount=stake_amount,
                created_at=created_at)
            self._stake_dao.add_stake_amount(request_id=request_id,
                                             stake_member=staker,
                                             stake_amount=funded_amount)
            # NOTE(review): the transaction is persisted against `requester`,
            # not `staker` — confirm this is intended.
            self._fund_request_trxn_dao.persist_transaction(
                stake_member=requester,
                transaction_hash=event["data"]["transactionHash"],
                created_at=created_at)
            self._request_dao.update_request_for_given_request_id(
                request_id=request_id,
                update_parameters={
                    "request_fund": request_fund,
                    "fund_total": total_fund
                })
            self._connection.commit_transaction()
        except Exception as e:
            # FIX: reconstructed the rollback log f-string that was broken
            # across two lines in the mangled source.
            logger.info(
                f"Transaction Rollback for event {event}. Error::{repr(e)}")
            self._connection.rollback_transaction()
            raise e
def test_cancel_given_order(self, mock_report_slack):
    """Cancelling an order marks its transaction_history row ORDER_CANCELED."""
    order_id = "Fb736cfa-dae4-11e9-9769-26327914c219"
    event = {
        "path": "/orchestrator/order/Fb736cfa-dae4-11e9-9769-26327914c219/cancel",
        "pathParameters": {
            "order_id": order_id
        },
        "httpMethod": "GET"
    }
    response = cancel_given_order(event=event, context=None)
    assert response["statusCode"] == 200
    body = json.loads(response["body"])
    assert body["status"] == "success"
    repo = Repository(net_id=NETWORK_ID, NETWORKS=NETWORKS)
    rows = repo.execute(
        "SELECT * FROM transaction_history WHERE order_id = %s AND status = %s",
        [order_id, "ORDER_CANCELED"])
    assert len(rows) == 1
def setUp(self):
    """Seed two users and one user_preference row used by the tests."""
    self.repo = Repository(net_id=NETWORK_ID, NETWORKS=NETWORKS)
    now = datetime.utcnow()
    epoch = now.timestamp()
    insert_user = (
        "INSERT INTO user (row_id, username, account_id, name, email, email_verified, status, request_id, "
        "request_time_epoch, row_created, row_updated) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)")
    # User one: no preferences attached.
    user_one_row_id = 101
    self.repo.execute(insert_user, [
        user_one_row_id, "******", "123", "dummy_user_2", "******", 1, 1,
        "id_123", epoch, now, now
    ])
    # User two: carries the seeded preference below.
    user_two_row_id = 202
    self.repo.execute(insert_user, [
        user_two_row_id, "******", "123", "dummy_user_2", "******", 1, 1,
        "id_123", epoch, now, now
    ])
    self.repo.execute(
        "INSERT INTO user_preference(row_id, user_row_id, preference_type, communication_type, source, "
        "opt_out_reason, status, created_on, updated_on) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s)",
        [
            1111, user_two_row_id, "FEATURE_RELEASE", "SMS",
            "PUBLISHER_DAPP", None, 0, now, now
        ])
class TestCreateChannelConsumer(TestCase):
    """Tests for the create-channel event consumer."""

    def setUp(self):
        self.connection = Repository(net_id=NETWORK_ID, NETWORKS=NETWORKS)

    @patch(
        "wallets.service.wallet_service.WalletService.open_channel_by_third_party"
    )
    def test_create_channel_event_consumer_success(self, create_channel_mock):
        """A persisted PENDING event must be consumed by the manager."""
        create_channel_mock.return_value = {}
        channel_dao = ChannelDAO(self.connection)
        channel_dao.persist_create_channel_event(
            {
                "r": "0x7be1502b09f5997339571f4885194417d6ca84ca65f98a9a2883d981d071ba62",
                "s": "0x55bcc83399b93bc60d70d4b10e33db626eac0dafd863b91e00a6b4b2c3586eb6",
                "v": 27,
                "amount": 2,
                "org_id": "snet",
                "sender": "0x4A3Beb90be90a28fd6a54B6AE449dd93A3E26dd0",
                "currency": "USD",
                "group_id": "m5FKWq4hW0foGW5qSbzGSjgZRuKs7A1ZwbIrJ9e96rc=",
                "order_id": "b7d9ffa0-07a3-11ea-b3cf-9e57fd86be16",
                "recipient": "0xfA8a01E837c30a3DA3Ea862e6dB5C6232C9b800A",
                "signature": "0x7be1502b09f5997339571f4885194417d6ca84ca65f98a9a2883d981d071ba6255bcc83399b93bc60d70d4b10e33db626eac0dafd863b91e00a6b4b2c3586eb61b",
                "amount_in_cogs": 4000,
                "current_block_no": 6780504
            },
            datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
        ManageCreateChannelEvent().manage_create_channel_event()
        channel_create_events = channel_dao.get_one_create_channel_event(
            TransactionStatus.PENDING)
        # BUG FIX: the original only asserted True inside `if ... is None`,
        # so the test could never fail.  After processing, no PENDING
        # create-channel event may remain.
        self.assertIsNone(channel_create_events)

    def test_create_channel_even_consumer_no_data(self):
        # Running the consumer against an empty queue must not raise.
        ManageCreateChannelEvent().manage_create_channel_event()

    def tearDown(self):
        self.connection.execute("DELETE FROM create_channel_event")
def get_public_keys(self):
    """Return daemon public keys registered for the org/service/group headers."""
    headers = self.event['headers']
    query = 'SELECT public_key FROM demon_auth_keys WHERE org_id = %s AND service_id = %s AND group_id = %s '
    stored_public_keys = Repository(net_id=NET_ID, NETWORKS=NETWORKS).execute(
        query, [
            headers['x-organizationid'], headers['x-serviceid'],
            headers['x-groupid']
        ])
    if not stored_public_keys:
        return []
    return [row['public_key'] for row in stored_public_keys]
class RFAIClaimBackRequestEventConsumer(RFAIEventConsumer):
    """Handles RFAI ClaimBack events.

    Currently only re-reads the request from chain; no state is persisted.
    """

    _connection = Repository(NETWORKS=NETWORK)
    _rfai_request_repository = RequestDAO(_connection)

    def __init__(self, net_id, ws_provider, ipfs_url, ipfs_port):
        super().__init__(net_id, ws_provider, ipfs_url, ipfs_port)

    def on_event(self, event):
        # FIX: removed a leftover debug print("reached").
        event_data = self._get_event_data(event)
        request_id = event_data['requestId']
        # The unpacked fields are not persisted yet; the lookup still
        # exercises the on-chain read for the request.
        [
            found, request_id, requester, total_fund, document_uri, expiration,
            end_submission, end_evaluation, status, stake_members, submitters
        ] = self._get_rfai_service_request_by_id(request_id)
class RFAIAddFoundationMemberEventConsumer(RFAIEventConsumer):
    """Upserts foundation-member rows from AddFoundationMember events."""

    _connection = Repository(NETWORKS=NETWORK)
    _rfai_foundation_member_repository = FoundationMemberDAO(_connection)

    def __init__(self, net_id, ws_provider, ipfs_url, ipfs_port):
        super().__init__(net_id, ws_provider, ipfs_url, ipfs_port)

    def on_event(self, event):
        """Map the event payload onto a foundation-member upsert."""
        event_data = self._get_event_data(event)
        member_status = 1 if event_data['status'] == True else 0
        # check for last attribute created_at, right now set as current time .
        self._rfai_foundation_member_repository.create_or_update_foundation_member(
            event_data['member'], event_data['role'], member_status,
            event_data['actor'], datetime.datetime.utcnow())
class RFAIExtendRequestEventConsumer(RFAIEventConsumer):
    """Extends an RFAI request's expiration from an ExtendRequest event."""

    _connection = Repository(NETWORKS=NETWORK)
    _request_dao = RequestDAO(_connection)

    def __init__(self, net_id, ws_provider, ipfs_url, ipfs_port):
        super().__init__(net_id, ws_provider, ipfs_url, ipfs_port)

    def on_event(self, event):
        """Write the event's new expiration onto the stored request row."""
        # need to change this whenever we clean up it should nto be tied with db column name
        event_data = self._get_event_data(event)
        # FIX: dropped the unused `requester` local read from the event.
        request_id = event_data['requestId']
        filter_params = {"expiration": event_data['expiration']}
        self._request_dao.update_request_for_given_request_id(
            request_id, filter_params)
class RFAICreateRequestEventConsumer(RFAIEventConsumer):
    """Handles RFAI CreateRequest events: persists the request row (enriched
    from IPFS metadata) plus the requester's initial stake."""

    _connection = Repository(NETWORKS=NETWORK)
    _rfai_request_repository = RequestDAO(_connection)

    def __init__(self, net_id, ws_provider, ipfs_url, ipfs_port):
        super().__init__(net_id, ws_provider, ipfs_url, ipfs_port)

    def on_event(self, event):
        """Create the request row, then record the requester's stake."""
        event_data = self._get_event_data(event)
        request_id = event_data['requestId']
        requester = event_data['requester']
        expiration = event_data['expiration']
        amount = event_data['amount']
        metadata_hash = self._get_metadata_hash(event_data['documentURI'])
        self._process_create_request_event(request_id, requester, expiration,
                                           amount, metadata_hash)
        # NOTE(review): `_stake_dao_repository` is not defined on this class
        # (only `_rfai_request_repository` is).  Presumably the base class
        # provides it — confirm, otherwise this raises AttributeError.
        self._stake_dao_repository.create_stake(
            request_id, requester, amount, 0,
            event["data"]["transactionHash"], dt.utcnow())

    def _process_create_request_event(self, request_id, requester, expiration,
                                      amount, metadata_hash):
        """Fetch on-chain request state plus IPFS metadata and insert the row."""
        [
            found, request_id, requester, total_fund, document_uri, expiration,
            end_submission, end_evaluation, status, stake_members, submitters
        ] = self._get_rfai_service_request_by_id(request_id)
        # SECURITY: eval() on IPFS-sourced metadata executes arbitrary
        # expressions — should be ast.literal_eval or json.loads.
        rfai_metadata = eval(self._get_rfai_metadata_from_ipfs(metadata_hash))
        title = rfai_metadata['title']
        requester_name = requester
        description = rfai_metadata['description']
        git_hub_link = ''
        training_data_set_uri = rfai_metadata['training-dataset']
        acceptance_criteria = rfai_metadata['acceptance-criteria']
        request_actor = ''
        created_at = rfai_metadata['created']
        self._rfai_request_repository.create_request(
            request_id, requester, total_fund, metadata_hash, expiration,
            end_submission, end_evaluation, status, title, requester_name,
            description, git_hub_link, training_data_set_uri,
            acceptance_criteria, request_actor, created_at)
class RFAIAddSolutionRequestEventConsumer(RFAIEventConsumer):
    """Upserts a solution submitted against an RFAI request."""

    _connection = Repository(NETWORKS=NETWORK)
    _rfai_solution_repository = SolutionDAO(_connection)

    def __init__(self, net_id, ws_provider, ipfs_url, ipfs_port):
        super().__init__(net_id, ws_provider, ipfs_url, ipfs_port)

    def on_event(self, event):
        """Validate the request on chain and upsert the solution row."""
        event_data = self._get_event_data(event)
        request_id = event_data['requestId']
        # FIX: the request details were fetched twice (first into an unused
        # `result`); a single call is enough.
        [
            found, request_id, requester, total_fund, document_uri, expiration,
            end_submission, end_evaluation, status, stake_members, submitters
        ] = self._get_rfai_service_request_by_id(request_id)
        self._rfai_solution_repository.create_or_update_solution(
            request_id=request_id,
            submitter=event_data["submitter"],
            doc_uri=event_data["solutionDocURI"],
            claim_amount=0,
            created_at=datetime.datetime.utcnow())
def test_on_event_channel_open(self):
    """A ChannelOpen event must insert a matching mpe_channel row."""
    event = {
        "data": {
            'row_id': 347,
            'block_no': 6629145,
            'event': 'ChannelOpen',
            'json_str': '{\'sender\': \'0x669CCF5025C08304Fd836d7A136634E22C5Dd31C\', \'recipient\': \'0xaceB1EaCA36061ff29Ddb7c963142abbFf23e508\', \'groupId\': b"\\xbc\\xb0\\xa1\\x93Z\\xa1\\xab\\x11\\xfd\\xbcX\\x1c\\x1cxZ\\xdc.\\xb6\\xba\\x8e\\xc6\\xc8C*\\xd7\\xa9\\xea\\x91\\xe6\'\\xae\\xfc", \'channelId\': 143, \'nonce\': 0, \'signer\': \'0x669CCF5025C08304Fd836d7A136634E22C5Dd31C\', \'amount\': 1, \'expiration\': 8731644}',
            'processed': b'\x00',
            'transactionHash': "b'\\xb7X\\xde\\x13{\\x13\\x05$\\x99\\x8c\\x1a\\xe4\\xae\\xdf\\x0f\\x88\\x08\\xd4\\x0f\\x7fVV^T;s\\x93\\x90$?\\xe6\\x14'",
            'logIndex': '42',
            'error_code': 1,
            'error_msg': '',
            'row_updated': datetime(2019, 10, 23, 12, 35, 53),
            'row_created': datetime(2019, 10, 23, 12, 35, 53)
        },
        "name": "ChannelOpen"
    }
    # NOTE(review): this consumer hits a live ropsten websocket endpoint —
    # the test is network-dependent; consider mocking.
    mpe_event_consumer = MPEEventConsumer("wss://ropsten.infura.io/ws")
    mpe_repository = MPERepository(
        Repository(NETWORK_ID, NETWORKS=NETWORKS))
    # Ensure a clean slate for channel 143 before consuming the event.
    mpe_repository.delete_mpe_channel(143)
    mpe_event_consumer.on_event(event=event)
    channel_result = mpe_repository.get_mpe_channels(143)
    assert channel_result[0]['channel_id'] == 143
    assert channel_result[0][
        'sender'] == '0x669CCF5025C08304Fd836d7A136634E22C5Dd31C'
    assert channel_result[0][
        'recipient'] == '0xaceB1EaCA36061ff29Ddb7c963142abbFf23e508'
    assert channel_result[0][
        'groupId'] == 'vLChk1qhqxH9vFgcHHha3C62uo7GyEMq16nqkeYnrvw='
    assert channel_result[0]['balance_in_cogs'] == Decimal('1.00000000')
    assert channel_result[0]['pending'] == Decimal('0E-8')
    assert channel_result[0]['expiration'] == 8731644
    assert channel_result[0][
        'signer'] == '0x669CCF5025C08304Fd836d7A136634E22C5Dd31C'
class RFAIAddFoundationMemberEventConsumer(RFAIEventConsumer):
    """Upserts foundation members, stamped with the block's timestamp."""

    _connection = Repository(NETWORKS=NETWORK)
    _rfai_foundation_member_repository = FoundationMemberDAO(_connection)

    def __init__(self, net_id, ws_provider, ipfs_url, ipfs_port):
        super().__init__(net_id, ws_provider, ipfs_url, ipfs_port)

    def on_event(self, event):
        """Map the event payload onto a foundation-member upsert."""
        created_at_in_epoch = self._blockchain_util.get_created_at_for_block(
            block_no=event["data"]["block_no"]).get("timestamp", None)
        # BUG FIX: created_at was unassigned when the block timestamp was
        # missing, producing an UnboundLocalError below; fail fast instead.
        if created_at_in_epoch is None:
            raise ValueError(f"No block timestamp found for event {event}")
        created_at = time.strftime('%Y-%m-%d %H:%M:%S',
                                   time.localtime(created_at_in_epoch))
        event_data = self._get_event_data(event)
        member_address = event_data['member']
        actor = event_data['actor']
        role = event_data['role']
        status = int(event_data['status'] == True)
        self._rfai_foundation_member_repository.create_or_update_foundation_member(
            member_address=member_address,
            role=role,
            status=status,
            request_actor=actor,
            created_at=created_at)
class RFAIVoteRequestEventConsumer(RFAIEventConsumer):
    """Records a vote for the solution submitted on an RFAI request."""

    _connection = Repository(NETWORKS=NETWORK)
    _rfai_vote_repository = VoteDAO(_connection)
    _rfai_solution_repository = SolutionDAO(_connection)

    def __init__(self, net_id, ws_provider, ipfs_url, ipfs_port):
        super().__init__(net_id, ws_provider, ipfs_url, ipfs_port)

    def on_event(self, event):
        """Resolve the voted solution and upsert the vote row."""
        event_data = self._get_event_data(event)
        request_id = event_data['requestId']
        solution = self._rfai_solution_repository.get_solution_id_for_given_submitter(
            request_id=request_id, submitter=event_data["submitter"])
        self._create_or_update_vote(
            request_id=request_id,
            voter=event_data["voter"],
            rfai_solution_id=solution["rfai_solution_id"])

    def _create_or_update_vote(self, request_id, voter, rfai_solution_id):
        """Upsert the vote row stamped with the current UTC time."""
        self._rfai_vote_repository.create_or_update_vote(
            request_id=request_id,
            voter=voter,
            rfai_solution_id=rfai_solution_id,
            created_at=datetime.datetime.utcnow())
import json import logging import re import traceback from schema import Schema, And from common.constant import NETWORKS from common.repository import Repository from mpe import MPE from registry import Registry NETWORKS_NAME = dict( (NETWORKS[netId]['name'], netId) for netId in NETWORKS.keys()) db = dict((netId, Repository(net_id=netId)) for netId in NETWORKS.keys()) def request_handler(event, context): print(event) if 'path' not in event: return get_response(400, "Bad Request") try: payload_dict = None resp_dta = None path = event['path'].lower() stage = event['requestContext']['stage'] net_id = NETWORKS_NAME[stage] if event['httpMethod'] == 'POST': body = event['body'] if body is not None and len(body) > 0: payload_dict = json.loads(body)
def __init__(self, err_obj, net_id):
    """Hold the error handler, a repository for net_id, and a Utils helper."""
    self.util_obj = Utils()
    self.repo = Repository(net_id)
    self.err_obj = err_obj
from common.logger import get_logger from common.repository import Repository from common.utils import Utils from rfai.config import NETWORK_ID, NETWORK, SLACK_HOOK from rfai.constant import REQUIRED_KEYS_FOR_GET_RFAI_EVENT, REQUIRED_KEYS_FOR_GET_RFAI_SUMMARY_EVENT, \ REQUIRED_KEYS_FOR_GET_SOLUTION_FOR_REQUEST_EVENT, REQUIRED_KEYS_FOR_GET_STAKE_FOR_REQUEST_EVENT, \ REQUIRED_KEYS_FOR_GET_VOTE_FOR_REQUEST_EVENT from rfai.service.rfai_service import RFAIService rfai = RFAIService(repo=Repository(NETWORKS=NETWORK)) util = Utils() logger = get_logger(__name__) def request_handler(event, context): try: logger.info(event) valid_event = util.validate_dict( data_dict=event, required_keys=REQUIRED_KEYS_FOR_GET_RFAI_EVENT) if not valid_event: return util.generate_lambda_response(400, "Bad Request", cors_enabled=True) query_string_parameters = event["queryStringParameters"] response_data = rfai.get_requests( query_string_parameters=query_string_parameters) response = util.generate_lambda_response(200, { "status": "success", "data": response_data }, cors_enabled=True)
class HandleContractsDB:
    """Reads raw Registry/MPE blockchain events and mirrors them into the
    relational tables (organization, members, service, service_metadata,
    service_group, service_endpoint, service_tags, mpe_channel).

    Write paths run with auto_commit disabled and are committed/rolled back
    explicitly via _commit/_rollback.
    """

    def __init__(self, err_obj, net_id):
        # err_obj: error-handling object kept for later use.
        # net_id: network identifier the Repository connects with.
        self.err_obj = err_obj
        self.repo = Repository(net_id)
        self.util_obj = Utils()

    # read operations
    def read_registry_events(self):
        """Return up to EVNTS_LIMIT unprocessed registry events, oldest block first."""
        query = 'select * from registry_events_raw where processed = 0 order by block_no asc limit ' + EVNTS_LIMIT
        evts_dta = self.repo.execute(query)
        print('read_registry_events::read_count: ', len(evts_dta))
        return evts_dta

    def read_mpe_events(self):
        """Return up to EVNTS_LIMIT unprocessed MPE events, oldest block first."""
        query = 'select * from mpe_events_raw where processed = 0 order by block_no asc limit ' + EVNTS_LIMIT
        evts_dta = self.repo.execute(query)
        print('read_mpe_events::read_count: ', len(evts_dta))
        return evts_dta

    def _get_srvc_row_id(self, org_id, service_id):
        """Fetch the service table row_id for (org_id, service_id)."""
        print('get_srvc_row_id::service_id: ', service_id)
        query = 'SELECT row_id FROM service WHERE service_id = %s AND org_id = %s '
        srvc_data = self.repo.execute(query, [service_id, org_id])
        print('get_srvc_row_id::srvc_data: ', srvc_data)
        return srvc_data

    def _get_srvcs(self, org_id):
        """Fetch all service rows belonging to an organization."""
        query = 'SELECT * FROM service WHERE org_id = %s '
        srvc_data = self.repo.execute(query, (org_id))
        print('_get_srvcs::srvc_data: ', srvc_data)
        return srvc_data

    # write operations
    def _create_or_updt_org(self, org_id, org_data, conn):
        """Upsert an organization row.

        org_data is positional: index 2 is the organization name and index 3
        the owner address (presumably from the decoded event tuple — verify
        against the caller).
        """
        upsert_qry = "Insert into organization (org_id, organization_name, owner_address, row_updated, row_created) " \
                     "VALUES ( %s, %s, %s, %s, %s ) " \
                     "ON DUPLICATE KEY UPDATE organization_name = %s, owner_address = %s, row_updated = %s "
        upsert_params = [org_id, org_data[2], org_data[3], dt.utcnow(), dt.utcnow(),
                         org_data[2], org_data[3], dt.utcnow()]
        print('upsert_qry: ', upsert_qry)
        qry_resp = conn.execute(upsert_qry, upsert_params)
        print('_create_or_updt_org::row upserted: ', qry_resp)

    def _create_or_updt_members(self, org_id, members, conn):
        """Upsert one members row per member; counts affected rows via qry_res[0]."""
        upsrt_members = "INSERT INTO members (org_id, member, row_created, row_updated)" \
                        "VALUES ( %s, %s, %s, %s )" \
                        "ON DUPLICATE KEY UPDATE row_updated = %s "
        cnt = 0
        for member in members:
            upsrt_members_params = [org_id, member, dt.utcnow(), dt.utcnow(), dt.utcnow()]
            qry_res = conn.execute(upsrt_members, upsrt_members_params)
            cnt = cnt + qry_res[0]
        print('create_or_updt_members::row upserted', cnt)

    def _create_channel(self, q_dta, conn):
        """Upsert an MPE payment channel row from a decoded event dict.

        Expects keys: channelId, sender, recipient, groupId, amount, nonce,
        expiration, signer. `pending` is always written as 0.0.
        """
        upsrt_mpe_chnl = "INSERT INTO mpe_channel (channel_id, sender, recipient, groupId, balance_in_cogs, pending, nonce, " \
                         "expiration, signer, row_created, row_updated) " \
                         "VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s) " \
                         "ON DUPLICATE KEY UPDATE balance_in_cogs = %s, pending = %s, nonce = %s, " \
                         "expiration = %s, row_updated = %s"
        upsrt_mpe_chnl_params = [q_dta['channelId'], q_dta['sender'], q_dta['recipient'], q_dta['groupId'],
                                 q_dta['amount'], 0.0, q_dta['nonce'], q_dta['expiration'], q_dta['signer'],
                                 dt.utcnow(), dt.utcnow(),
                                 q_dta['amount'], 0.0, q_dta['nonce'], q_dta['expiration'], dt.utcnow()]
        qry_res = conn.execute(upsrt_mpe_chnl, upsrt_mpe_chnl_params)
        print('_create_channel::row upserted', qry_res)

    def _del_srvc(self, org_id, service_id, conn):
        """Delete one service row identified by (org_id, service_id)."""
        del_srvc = 'DELETE FROM service WHERE service_id = %s AND org_id = %s '
        qry_res = conn.execute(del_srvc, [service_id, org_id])
        print('_del_srvc::rows deleted: ', qry_res)

    def _del_org(self, org_id, conn):
        """Delete the organization row.

        NOTE(review): org_id is passed bare, not as a list, unlike the other
        delete helpers — confirm Repository.execute accepts a scalar param.
        """
        del_org = 'DELETE FROM organization WHERE org_id = %s '
        qry_res = conn.execute(del_org, org_id)
        print('_del_org::rows deleted: ', qry_res)

    def _del_members(self, org_id, conn):
        """Delete all members rows for an organization.

        NOTE(review): same bare (non-list) parameter as _del_org — confirm.
        """
        del_org = 'DELETE FROM members WHERE org_id = %s '
        qry_res = conn.execute(del_org, org_id)
        print('_del_members::rows deleted: ', qry_res)

    def _del_tags(self, org_id, service_id, conn):
        """Delete all service_tags rows for (org_id, service_id)."""
        del_srvc_tags = 'DELETE FROM service_tags WHERE service_id = %s AND org_id = %s '
        del_srvc_tags_count = conn.execute(del_srvc_tags, [service_id, org_id])
        print('_del_tags::del_srvc_tags: ', del_srvc_tags_count)

    def _del_srvc_dpndts(self, org_id, service_id, conn):
        """Delete a service's dependent rows: groups, endpoints, status, tags."""
        print("_del_srvc_dpndts::service_id: ", service_id, '|org_id: ', org_id)
        del_srvc_grps = 'DELETE FROM service_group WHERE service_id = %s AND org_id = %s '
        del_srvc_grps_count = conn.execute(del_srvc_grps, [service_id, org_id])
        del_srvc_endpts = 'DELETE FROM service_endpoint WHERE service_id = %s AND org_id = %s '
        del_srvc_endpts_count = conn.execute(del_srvc_endpts, [service_id, org_id])
        del_srvc_st = 'DELETE FROM service_status WHERE service_id = %s AND org_id = %s '
        del_srvc_st_count = conn.execute(del_srvc_st, [service_id, org_id])
        self._del_tags(org_id=org_id, service_id=service_id, conn=conn)
        print('_del_srvc_dpndts::del_srvc_grps: ', del_srvc_grps_count, '|del_srvc_endpts: ', del_srvc_endpts_count,
              '|del_srvc_st_count: ', del_srvc_st_count)

    def _create_or_updt_srvc(self, org_id, service_id, ipfs_hash, conn):
        """Upsert a service row (is_curated defaults to 0 on insert).

        Returns the last element of the execute() result — presumably a dict
        carrying 'last_row_id' (see process_srvc_data); confirm against
        Repository.execute.
        """
        upsrt_srvc = "INSERT INTO service (org_id, service_id, is_curated, ipfs_hash, row_created, row_updated) " \
                     "VALUES (%s, %s, %s, %s, %s, %s) " \
                     "ON DUPLICATE KEY UPDATE ipfs_hash = %s, row_updated = %s "
        upsrt_srvc_params = [org_id, service_id, 0, ipfs_hash, dt.utcnow(), dt.utcnow(), ipfs_hash, dt.utcnow()]
        qry_res = conn.execute(upsrt_srvc, upsrt_srvc_params)
        print('_create_or_updt_srvc::row upserted', qry_res)
        return qry_res[len(qry_res) - 1]

    def _create_or_updt_srvc_mdata(self, srvc_rw_id, org_id, service_id, ipfs_data, conn):
        """Upsert the service_metadata row from the service's IPFS metadata dict.

        Required ipfs_data keys: pricing, display_name, model_ipfs_hash,
        encoding, service_type, mpe_address; service_description, json and
        payment_expiration_threshold are optional.
        """
        upsrt_srvc_mdata = "INSERT INTO service_metadata (service_row_id, org_id, service_id, price_model, " \
                           "price_in_cogs, display_name, model_ipfs_hash, description, url, json, encoding, type, " \
                           "mpe_address, payment_expiration_threshold, row_updated, row_created) " \
                           "VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s) " \
                           "ON DUPLICATE KEY UPDATE service_row_id = %s, price_model = %s, price_in_cogs = %s, " \
                           "display_name = %s, model_ipfs_hash = %s, description = %s, url = %s, json = %s, " \
                           "encoding = %s, type = %s, mpe_address = %s, payment_expiration_threshold = %s, row_updated = %s "
        price = ipfs_data['pricing']
        price_model = price.get('price_model', '')
        price_in_cogs = price.get('price_in_cogs', '')
        pm_exp_th = ipfs_data.get('payment_expiration_threshold')
        srvc_desc = ipfs_data.get('service_description', {})
        desc = srvc_desc.get('description', '')
        url = srvc_desc.get('url', '')
        json_str = ipfs_data.get('json', '')
        # First 16 params feed the INSERT column list; the remaining 13 feed
        # the ON DUPLICATE KEY UPDATE clause.
        upsrt_srvc_mdata_params = [srvc_rw_id, org_id, service_id, price_model, price_in_cogs,
                                   ipfs_data['display_name'], ipfs_data['model_ipfs_hash'], desc, url, json_str,
                                   ipfs_data['encoding'], ipfs_data['service_type'], ipfs_data['mpe_address'],
                                   pm_exp_th, dt.utcnow(), dt.utcnow(),
                                   srvc_rw_id, price_model, price_in_cogs, ipfs_data['display_name'],
                                   ipfs_data['model_ipfs_hash'], desc, url, json_str, ipfs_data['encoding'],
                                   ipfs_data['service_type'], ipfs_data['mpe_address'], pm_exp_th, dt.utcnow()]
        qry_res = conn.execute(upsrt_srvc_mdata, upsrt_srvc_mdata_params)
        print('_create_or_updt_srvc_mdata::row upserted', qry_res)

    def _create_grp(self, srvc_rw_id, org_id, service_id, grp_data, conn):
        """Insert one service_group row; returns the raw execute() result."""
        insrt_grp = "INSERT INTO service_group (service_row_id, org_id, service_id, group_id, group_name, " \
                    "payment_address, row_updated, row_created)" \
                    "VALUES(%s, %s, %s, %s, %s, %s, %s, %s)"
        insrt_grp_params = [srvc_rw_id, org_id, service_id, grp_data['group_id'], grp_data['group_name'],
                            grp_data['payment_address'], dt.utcnow(), dt.utcnow()]
        return conn.execute(insrt_grp, insrt_grp_params)

    def _create_edpts(self, srvc_rw_id, org_id, service_id, endpt_data, conn):
        """Insert one service_endpoint row; returns the raw execute() result."""
        insrt_endpt = "INSERT INTO service_endpoint (service_row_id, org_id, service_id, group_id, endpoint, " \
                      "row_created, row_updated) " \
                      "VALUES(%s, %s, %s, %s, %s, %s, %s)"
        insrt_endpt_params = [srvc_rw_id, org_id, service_id, endpt_data['group_id'], endpt_data['endpoint'],
                              dt.utcnow(), dt.utcnow()]
        return conn.execute(insrt_endpt, insrt_endpt_params)

    def _create_tags(self, srvc_rw_id, org_id, service_id, tag_name, conn):
        """Upsert one service_tags row for the given tag name."""
        insrt_tag = "INSERT INTO service_tags (service_row_id, org_id, service_id, tag_name, row_created, row_updated) " \
                    "VALUES(%s, %s, %s, %s, %s, %s) " \
                    "ON DUPLICATE KEY UPDATE tag_name = %s, row_updated = %s "
        insrt_tag_params = [srvc_rw_id, org_id, service_id, tag_name, dt.utcnow(), dt.utcnow(), tag_name, dt.utcnow()]
        qry_res = conn.execute(insrt_tag, insrt_tag_params)
        print('_create_tags::qry_res: ', qry_res)

    def _updt_raw_evts(self, row_id, type, err_cd, err_msg, conn):
        """Mark one raw event row processed, recording error code/message.

        type selects the table: 'REG' -> registry_events_raw, 'MPE' -> mpe_events_raw.
        NOTE(review): any other type leaves updt_evts unbound and raises
        NameError (caught below and only reported to Slack); `type` also
        shadows the builtin.
        """
        try:
            if type == 'REG':
                updt_evts = 'UPDATE registry_events_raw SET processed = 1, error_code = %s, error_msg = %s WHERE row_id = %s '
            elif type == 'MPE':
                updt_evts = 'UPDATE mpe_events_raw SET processed = 1, error_code = %s, error_msg = %s WHERE row_id = %s '
            updt_evts_resp = self.repo.execute(updt_evts, [err_cd, err_msg, row_id])
            print('updt_raw_evts::row updated: ', updt_evts_resp, '|', type)
        except Exception as e:
            self.util_obj.report_slack(type=1, slack_msg=repr(e))
            print('Error in updt_reg_evts_raw::error: ', e)

    def updt_raw_evts(self, row_id, type, err_cd, err_msg):
        """Public wrapper: mark a raw event processed using self.repo."""
        conn = self.repo
        self._updt_raw_evts(row_id, type, err_cd, err_msg, conn)

    def del_org(self, org_id):
        """Transactionally delete an organization and all of its services."""
        self.repo.auto_commit = False
        conn = self.repo
        try:
            self._del_org(org_id=org_id, conn=conn)
            srvcs = self._get_srvcs(org_id=org_id)
            for rec in srvcs:
                self._del_srvc(org_id=org_id, service_id=rec['service_id'], conn=conn)
            self._commit(conn=conn)
        except Exception as e:
            self.util_obj.report_slack(type=1, slack_msg=repr(e))
            self._rollback(conn=conn, err=repr(e))

    def del_srvc(self, org_id, service_id):
        """Delete a single service row (no explicit transaction handling)."""
        self._del_srvc(org_id=org_id, service_id=service_id, conn=self.repo)

    def create_channel(self, q_dta):
        """Normalize the hex groupId to base64 and upsert the channel row."""
        if q_dta['groupId'][0:2] == '0x':
            q_dta['groupId'] = q_dta['groupId'][2:]
        q_dta['groupId'] = base64.b64encode(bytes.fromhex(q_dta['groupId'])).decode('utf8')
        self._create_channel(q_dta, self.repo)

    def update_channel(self, channel_id, group_id, channel_data):
        """Upsert a channel from a positional channel tuple.

        Positional layout used here: [0]=nonce, [1]=sender, [2]=signer,
        [3]=recipient, [5]=amount, [6]=expiration — presumably the on-chain
        channel struct; verify against the caller.
        """
        print('update_channel::channel_id: ', channel_id)
        self._create_channel(q_dta={
            'sender': channel_data[1],
            'recipient': channel_data[3],
            'nonce': int(channel_data[0]),
            'expiration': channel_data[6],
            'signer': channel_data[2],
            'groupId': group_id,
            'channelId': channel_id,
            'amount': channel_data[5]
        }, conn=self.repo)

    def process_srvc_data(self, org_id, service_id, ipfs_hash, ipfs_data, tags_data):
        """Transactionally rebuild a service: wipe dependents, upsert the
        service and its metadata, then recreate groups, endpoints and tags.

        tags_data is positional: [0] is a truthiness flag, [3] the list of
        null-padded byte tags.
        """
        self.repo.auto_commit = False
        conn = self.repo
        try:
            self._del_srvc_dpndts(org_id=org_id, service_id=service_id, conn=conn)
            qry_data = self._create_or_updt_srvc(org_id=org_id, service_id=service_id, ipfs_hash=ipfs_hash, conn=conn)
            service_row_id = qry_data['last_row_id']
            print('service_row_id == ', service_row_id)
            self._create_or_updt_srvc_mdata(srvc_rw_id=service_row_id, org_id=org_id, service_id=service_id,
                                            ipfs_data=ipfs_data, conn=conn)
            grps = ipfs_data.get('groups', [])
            cnt = 0
            # Map group_name -> group_id so endpoints (keyed by group_name)
            # can be linked to their group_id below.
            grp_name_id_dict = {}
            for grp in grps:
                grp_name_id_dict[grp['group_name']] = grp['group_id']
                qry_data = self._create_grp(srvc_rw_id=service_row_id, org_id=org_id, service_id=service_id,
                                            conn=conn, grp_data={
                                                'group_id': grp['group_id'],
                                                'group_name': grp['group_name'],
                                                'payment_address': grp['payment_address']
                                            })
                cnt = cnt + qry_data[0]
            print('rows insert in grp: ', cnt)
            endpts = ipfs_data.get('endpoints', [])
            cnt = 0
            for endpt in endpts:
                qry_data = self._create_edpts(srvc_rw_id=service_row_id, org_id=org_id, service_id=service_id,
                                              conn=conn, endpt_data={
                                                  'endpoint': endpt['endpoint'],
                                                  'group_id': grp_name_id_dict[endpt['group_name']]
                                              })
                cnt = cnt + qry_data[0]
            print('rows insert in endpt: ', cnt)
            if (tags_data is not None and tags_data[0]):
                tags = tags_data[3]
                for tag in tags:
                    tag = tag.decode('utf-8')
                    tag = tag.rstrip("\u0000")  # strip null padding from fixed-width on-chain tag
                    self._create_tags(srvc_rw_id=service_row_id, org_id=org_id, service_id=service_id,
                                      tag_name=tag, conn=conn)
            self._commit(conn=conn)
        except Exception as e:
            self.util_obj.report_slack(type=1, slack_msg=repr(e))
            self._rollback(conn=conn, err=repr(e))

    def process_org_data(self, org_id, org_data):
        """Transactionally upsert an organization and replace its members.

        org_data is positional: [0] truthiness flag, [2] name, [3] owner
        address, [4] member list.
        """
        self.repo.auto_commit = False
        conn = self.repo
        try:
            if (org_data is not None and org_data[0]):
                self._create_or_updt_org(org_id, org_data, conn)
                self._del_members(org_id=org_id, conn=conn)
                self._create_or_updt_members(org_id, org_data[4], conn)
                self._commit(conn)
        except Exception as e:
            self.util_obj.report_slack(type=1, slack_msg=repr(e))
            self._rollback(conn=conn, err=repr(e))

    def update_tags(self, org_id, service_id, tags_data):
        """Transactionally replace the tags of an existing service."""
        self.repo.auto_commit = False
        conn = self.repo
        try:
            self._del_tags(org_id=org_id, service_id=service_id, conn=conn)
            if (tags_data is not None and tags_data[0]):
                tags = tags_data[3]
                srvc_data = self._get_srvc_row_id(service_id=service_id, org_id=org_id)
                srvc_rw_id = srvc_data[0]['row_id']
                for tag in tags:
                    tag = tag.decode('utf-8')
                    tag = tag.rstrip("\u0000")  # strip null padding from fixed-width on-chain tag
                    self._create_tags(srvc_rw_id=srvc_rw_id, org_id=org_id, service_id=service_id,
                                      tag_name=tag, conn=conn)
            self._commit(conn)
        except Exception as e:
            self.util_obj.report_slack(type=1, slack_msg=repr(e))
            self._rollback(conn=conn, err=repr(e))

    # transaction helpers
    def _commit(self, conn):
        """Commit the open transaction and restore auto-commit."""
        conn.auto_commit = True
        conn.connection.commit()
        print('_commit')
        print(conn.connection)

    def _rollback(self, conn, err):
        """Roll back the open transaction after a failure and restore auto-commit."""
        print('_rollback ::error: ', err)
        conn.auto_commit = True
        conn.connection.rollback()