class TestUserService(TestCase):
    def setUp(self):
        self.repo = Repository(net_id=NETWORK_ID, NETWORKS=NETWORKS)

    @patch("common.boto_utils.BotoUtils.invoke_lambda")
    def test_delete_user(self, mock_invoke_lambda):
        user_service = UserService()
        username = "******"
        account_id = "123"
        name = "dummy_135"
        status = 1
        request_id = "id_123"
        current_time = datetime.utcnow()
        epoch_time = current_time.timestamp()
        self.repo.execute(
            "INSERT INTO user (username, account_id, name, email, email_verified, status, request_id, "
            "request_time_epoch, row_created, row_updated) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)",
            [
                username, account_id, name, username, 1, status, request_id,
                epoch_time, current_time, current_time
            ])
        mock_invoke_lambda.return_value = {"statusCode": 201, "body": ""}
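        # the downstream lambda invocation is mocked above, so delete_user never makes a real AWS call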
        user_service.delete_user(username)
        user_data = self.repo.execute(
            "SELECT username FROM user WHERE username = %s", [username])
        if len(user_data) == 0:
            assert True
        else:
            assert False

    def tearDown(self):
        self.repo.execute("DELETE FROM user")
Example #2
class DappUserService(TestCase):
    def setUp(self):
        self._repo = Repository(net_id=NETWORK_ID, NETWORKS=NETWORKS)

    @patch("common.utils.Utils.report_slack")
    def test_register_user_on_post_cognito_signup(self, report_slack):
        report_slack.return_value = None
        event = {
            'version': '1',
            'region': 'us-east-2',
            'userPoolId': 'us-east-2 tyuiop',
            'userName': '******',
            'callerContext': {
                'awsSdkVersion': 'aws-sdk-unknown-unknown',
                'clientId': 'dummy_client_id_1'
            },
            'triggerSource': 'PostConfirmation_ConfirmSignUp',
            'request': {
                'userAttributes': {
                    'sub': '23456789-d2cb-4388-9915-3456789',
                    'cognito:email_alias': '*****@*****.**',
                    'cognito:user_status': 'CONFIRMED',
                    'email_verified': 'true',
                    'nickname': 'Piyush',
                    'email': '*****@*****.**'
                }
            },
            'response': {}
        }
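        # Cognito post-confirmation triggers must return the incoming event unchanged,
        # which is what the assertion below verifies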
        event_response = register_user_post_aws_cognito_signup(event, None)
        assert (event_response == event)

    def tearDown(self):
        self._repo.execute("DELETE FROM user")
class TestWalletAPI(unittest.TestCase):
    def setUp(self):
        self.NETWORKS_NAME = dict((NETWORKS[netId]["name"], netId) for netId in NETWORKS.keys())
        self.repo = Repository(net_id=NETWORK_ID, NETWORKS=NETWORKS)

    @patch("common.utils.Utils.report_slack")
    @patch("common.blockchain_util.BlockChainUtil.create_account")
    def test_create_wallet(self, mock_create_account, mock_report_slack):

        create_wallet_event = {
            "path": "/wallet",
            "httpMethod": "POST",
            "body": '{"username": "******"}',
        }
        mock_create_account.return_value = (
            "323449587122651441342932061624154600879572532581",
            "26561428888193216265620544717131876925191237116680314981303971688115990928499",
        )
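        # the mocked tuple stands in for the (address, private_key) pair returned by create_account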
        response = lambda_handler.request_handler(create_wallet_event,
                                                  context=None)
        assert response["statusCode"] == 200
        response_body = json.loads(response["body"])
        assert response_body["status"] == "success"
        assert (response_body["data"]["address"] ==
                "323449587122651441342932061624154600879572532581")
        assert (
            response_body["data"]["private_key"] ==
            "26561428888193216265620544717131876925191237116680314981303971688115990928499"
        )
        assert response_body["data"]["status"] == 0
        assert response_body["data"]["type"] == "GENERAL"

    @patch("common.utils.Utils.report_slack")
    def test_create_wallet_and_channel(self, mock_report_slack):
        pass

    @patch("common.utils.Utils.report_slack")
    def test_create_channel(self, mock_report_slack):
        pass

    @patch("common.utils.Utils.report_slack")
    def test_top_up_channel(self, mock_report_slack):
        pass

    @patch("common.utils.Utils.report_slack")
    def test_get_wallet_details(self, mock_report_slack):
        pass

    @patch("common.utils.Utils.report_slack")
    def test_register_wallets(self, mock_report_slack):
        pass

    @patch("common.utils.Utils.report_slack")
    def test_set_default_wallet(self, mock_report_slack):
        pass

    def tearDown(self):
        self.repo.execute("DELETE FROM wallet")
        self.repo.execute("DELETE FROM user_wallet")
Example #4
class TestMPE(TestCase):
    def setUp(self):
        self.NETWORKS_NAME = dict(
            (NETWORKS[netId]["name"], netId) for netId in NETWORKS.keys())
        self.repo = Repository(net_id=NETWORK_ID, NETWORKS=NETWORKS)
        self.mpe = MPE(self.repo)

    def test_update_consumed_balance(self):
        mpe_repo = MPERepository(self.repo)
        mpe_repo.create_channel({
            "channelId": 1,
            "sender": '0x123',
            "recipient": "0x345",
            "groupId":
            b"\xbc\xb0\xa1\x93Z\xa1\xab\x11\xfd\xbcX\x1c\x1cxZ\xdc.\xb6\xba\x8e\xc6\xc8C*\xd7\xa9\xea\x91\xe6'\xae\xfc",
            "amount": 100,
            "pending": 0,
            "nonce": 0,
            "expiration": 8396357,
            "signer": '0x987',
            "consumed_balance": 12
        })
        mpe_repo.create_channel({
            "channelId": 2,
            "sender": '0x3432',
            "recipient": "0x5453",
            "groupId":
            b"\xbc\xb0\xa1\x93Z\xa1\xab\x11\xfd\xbcX\x1c\x1cxZ\xdc.\xb6\xba\x8e\xc6\xc8C*\xd7\xa9\xea\x91\xe6'\xae\xfc",
            "amount": 100,
            "pending": 0,
            "nonce": 1,
            "expiration": 8396357,
            "signer": '0x987',
            "consumed_balance": 3
        })

        self.assertDictEqual({},
                             self.mpe.update_consumed_balance(1, 13, 100, 0))
        # each call below passes an invalid combination for channel 2 (lower consumed balance,
        # wrong nonce, or mismatched amount) and is expected to raise
        with self.assertRaises(Exception):
            self.mpe.update_consumed_balance(2, 2, 100, 1)
        with self.assertRaises(Exception):
            self.mpe.update_consumed_balance(2, 4, 100, 0)
        with self.assertRaises(Exception):
            self.mpe.update_consumed_balance(2, 4, 80, 1)

    def tearDown(self):
        self.repo.execute("DELETE FROM `mpe_channel`")
    def test_update_transaction_status(self, mock_report_slack):
        response = request_handler(event={}, context=None)
        assert (response == "success")
        repo = Repository(net_id=NETWORK_ID, NETWORKS=NETWORKS)
        query_response = repo.execute(
            "SELECT * FROM transaction_history WHERE status = %s",
            ["ORDER_CANCELED"])
        assert (len(query_response) > 1)
Example #6
class TestCreateChannelConsumer(TestCase):
    def setUp(self):
        self.connection = Repository(net_id=NETWORK_ID, NETWORKS=NETWORKS)

    @patch(
        "wallets.service.wallet_service.WalletService.open_channel_by_third_party"
    )
    def test_create_channel_event_consumer_success(self, create_channel_mock):
        create_channel_mock.return_value = {}
        channel_dao = ChannelDAO(self.connection)
        channel_dao.persist_create_channel_event(
            {
                "r":
                "0x7be1502b09f5997339571f4885194417d6ca84ca65f98a9a2883d981d071ba62",
                "s":
                "0x55bcc83399b93bc60d70d4b10e33db626eac0dafd863b91e00a6b4b2c3586eb6",
                "v": 27,
                "amount": 2,
                "org_id": "snet",
                "sender": "0x4A3Beb90be90a28fd6a54B6AE449dd93A3E26dd0",
                "currency": "USD",
                "group_id": "m5FKWq4hW0foGW5qSbzGSjgZRuKs7A1ZwbIrJ9e96rc=",
                "order_id": "b7d9ffa0-07a3-11ea-b3cf-9e57fd86be16",
                "recipient": "0xfA8a01E837c30a3DA3Ea862e6dB5C6232C9b800A",
                "signature":
                "0x7be1502b09f5997339571f4885194417d6ca84ca65f98a9a2883d981d071ba6255bcc83399b93bc60d70d4b10e33db626eac0dafd863b91e00a6b4b2c3586eb61b",
                "amount_in_cogs": 4000,
                "current_block_no": 6780504
            },
            datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
        ManageCreateChannelEvent().manage_create_channel_event()
        channel_create_events = channel_dao.get_one_create_channel_event(
            TransactionStatus.PENDING)
        # the pending event should have been consumed by the consumer above
        assert channel_create_events is None

    def test_create_channel_even_consumer_no_data(self):
        ManageCreateChannelEvent().manage_create_channel_event()

    def tearDown(self):
        self.connection.execute("DELETE FROM create_channel_event")
    def test_cancel_given_order(self, mock_report_slack):
        event = {
            "path": "/orchestrator/order/Fb736cfa-dae4-11e9-9769-26327914c219/cancel",
            "pathParameters": {
                "order_id": "Fb736cfa-dae4-11e9-9769-26327914c219"
            },
            "httpMethod": "GET"
        }
        response = cancel_given_order(event=event, context=None)
        assert (response["statusCode"] == 200)
        response_body = json.loads(response["body"])
        assert (response_body["status"] == "success")
        repo = Repository(net_id=NETWORK_ID, NETWORKS=NETWORKS)
        query_response = repo.execute(
            "SELECT * FROM transaction_history WHERE order_id = %s AND status = %s",
            ["Fb736cfa-dae4-11e9-9769-26327914c219", "ORDER_CANCELED"])
        assert (len(query_response) == 1)
class HandleContractsDB:
    def __init__(self, err_obj, net_id):
        self.err_obj = err_obj
        self.repo = Repository(net_id)
        self.util_obj = Utils()

    # read operations
    def read_registry_events(self):
        query = 'select * from registry_events_raw where processed = 0 order by block_no asc limit ' + EVNTS_LIMIT
        evts_dta = self.repo.execute(query)
        print('read_registry_events::read_count: ', len(evts_dta))
        return evts_dta

    def read_mpe_events(self):
        query = 'select * from mpe_events_raw where processed = 0 order by block_no asc limit ' + EVNTS_LIMIT
        evts_dta = self.repo.execute(query)
        print('read_mpe_events::read_count: ', len(evts_dta))
        return evts_dta

    def _get_srvc_row_id(self, org_id, service_id):
        print('get_srvc_row_id::service_id: ', service_id)
        query = 'SELECT row_id FROM service WHERE service_id = %s AND org_id = %s '
        srvc_data = self.repo.execute(query, [service_id, org_id])
        print('get_srvc_row_id::srvc_data: ', srvc_data)
        return srvc_data

    def _get_srvcs(self, org_id):
        query = 'SELECT * FROM service WHERE org_id = %s '
        srvc_data = self.repo.execute(query, [org_id])
        print('_get_srvcs::srvc_data: ', srvc_data)
        return srvc_data

    # write operations
    def _create_or_updt_org(self, org_id, org_data, conn):
        upsert_qry = "Insert into organization (org_id, organization_name, owner_address, row_updated, row_created) " \
                     "VALUES ( %s, %s, %s, %s, %s ) " \
                     "ON DUPLICATE KEY UPDATE organization_name = %s, owner_address = %s, row_updated = %s  "
        upsert_params = [org_id, org_data[2], org_data[3], dt.utcnow(), dt.utcnow(), org_data[2], org_data[3],
                         dt.utcnow()]
        print('upsert_qry: ', upsert_qry)
        qry_resp = conn.execute(upsert_qry, upsert_params)
        print('_create_or_updt_org::row upserted: ', qry_resp)

    def _create_or_updt_members(self, org_id, members, conn):
        upsrt_members = "INSERT INTO members (org_id, member, row_created, row_updated)" \
                        "VALUES ( %s, %s, %s, %s )" \
                        "ON DUPLICATE KEY UPDATE row_updated = %s "
        cnt = 0
        for member in members:
            upsrt_members_params = [org_id, member, dt.utcnow(), dt.utcnow(), dt.utcnow()]
            qry_res = conn.execute(upsrt_members, upsrt_members_params)
            cnt = cnt + qry_res[0]
        print('create_or_updt_members::row upserted', cnt)

    def _create_channel(self, q_dta, conn):
        upsrt_mpe_chnl = "INSERT INTO mpe_channel (channel_id, sender, recipient, groupId, balance_in_cogs, pending, nonce, " \
                         "expiration, signer, row_created, row_updated) " \
                         "VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s) " \
                         "ON DUPLICATE KEY UPDATE balance_in_cogs = %s, pending = %s, nonce = %s, " \
                         "expiration = %s, row_updated = %s"
        upsrt_mpe_chnl_params = [q_dta['channelId'], q_dta['sender'], q_dta['recipient'], q_dta['groupId'],
                                 q_dta['amount'], 0.0, q_dta['nonce'], q_dta['expiration'], q_dta['signer'], dt.utcnow(),
                                 dt.utcnow(), q_dta['amount'], 0.0, q_dta['nonce'], q_dta['expiration'], dt.utcnow()]
        qry_res = conn.execute(upsrt_mpe_chnl, upsrt_mpe_chnl_params)
        print('_create_channel::row upserted', qry_res)

    def _del_srvc(self, org_id, service_id, conn):
        del_srvc = 'DELETE FROM service WHERE service_id = %s AND org_id = %s '
        qry_res = conn.execute(del_srvc, [service_id, org_id])
        print('_del_srvc::rows deleted: ', qry_res)

    def _del_org(self, org_id, conn):
        del_org = 'DELETE FROM organization WHERE org_id = %s '
        qry_res = conn.execute(del_org, org_id)
        print('_del_org::rows deleted: ', qry_res)

    def _del_members(self, org_id, conn):
        del_org = 'DELETE FROM members WHERE org_id = %s '
        qry_res = conn.execute(del_org, org_id)
        print('_del_members::rows deleted: ', qry_res)

    def _del_tags(self, org_id, service_id, conn):
        del_srvc_tags = 'DELETE FROM service_tags WHERE service_id = %s AND org_id = %s '
        del_srvc_tags_count = conn.execute(del_srvc_tags, [service_id, org_id])
        print('_del_tags::del_srvc_tags: ', del_srvc_tags_count)

    def _del_srvc_dpndts(self, org_id, service_id, conn):
        print("_del_srvc_dpndts::service_id: ", service_id, '|org_id: ', org_id)
        del_srvc_grps = 'DELETE FROM service_group WHERE service_id = %s AND org_id = %s '
        del_srvc_grps_count = conn.execute(del_srvc_grps, [service_id, org_id])

        del_srvc_endpts = 'DELETE FROM service_endpoint WHERE service_id = %s AND org_id = %s '
        del_srvc_endpts_count = conn.execute(del_srvc_endpts, [service_id, org_id])

        del_srvc_st = 'DELETE FROM service_status WHERE service_id = %s AND org_id = %s '
        del_srvc_st_count = conn.execute(del_srvc_st, [service_id, org_id])

        self._del_tags(org_id=org_id, service_id=service_id, conn=conn)
        print('_del_srvc_dpndts::del_srvc_grps: ', del_srvc_grps_count, '|del_srvc_endpts: ', del_srvc_endpts_count,
              '|del_srvc_st_count: ', del_srvc_st_count)

    def _create_or_updt_srvc(self, org_id, service_id, ipfs_hash, conn):
        upsrt_srvc = "INSERT INTO service (org_id, service_id, is_curated, ipfs_hash, row_created, row_updated) " \
                     "VALUES (%s, %s, %s, %s, %s, %s) " \
                     "ON DUPLICATE KEY UPDATE ipfs_hash = %s, row_updated = %s "
        upsrt_srvc_params = [org_id, service_id, 0, ipfs_hash, dt.utcnow(), dt.utcnow(), ipfs_hash, dt.utcnow()]
        qry_res = conn.execute(upsrt_srvc, upsrt_srvc_params)
        print('_create_or_updt_srvc::row upserted', qry_res)
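        # the last element of the execute() result carries the insert metadata; callers read
        # the new row id from it as qry_data['last_row_id'] (see process_srvc_data)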
        return qry_res[len(qry_res) - 1]

    def _create_or_updt_srvc_mdata(self, srvc_rw_id, org_id, service_id, ipfs_data, conn):
        upsrt_srvc_mdata = "INSERT INTO service_metadata (service_row_id, org_id, service_id, price_model, " \
                           "price_in_cogs, display_name, model_ipfs_hash, description, url, json, encoding, type, " \
                           "mpe_address, payment_expiration_threshold, row_updated, row_created) " \
                           "VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s) " \
                           "ON DUPLICATE KEY UPDATE service_row_id = %s, price_model  = %s, price_in_cogs = %s, " \
                           "display_name = %s, model_ipfs_hash = %s, description = %s, url = %s, json = %s, " \
                           "encoding = %s, type = %s, mpe_address = %s, payment_expiration_threshold = %s, row_updated = %s "
        price = ipfs_data['pricing']
        price_model = price.get('price_model', '')
        price_in_cogs = price.get('price_in_cogs', '')
        pm_exp_th = ipfs_data.get('payment_expiration_threshold')
        srvc_desc = ipfs_data.get('service_description', {})
        desc = srvc_desc.get('description', '')
        url = srvc_desc.get('url', '')
        json_str = ipfs_data.get('json', '')
        upsrt_srvc_mdata_params = [srvc_rw_id, org_id, service_id, price_model, price_in_cogs, ipfs_data['display_name'],
                                   ipfs_data['model_ipfs_hash'], desc, url, json_str, ipfs_data['encoding'],
                                   ipfs_data['service_type'], ipfs_data['mpe_address'], pm_exp_th, dt.utcnow(), dt.utcnow(),
                                   srvc_rw_id, price_model, price_in_cogs, ipfs_data['display_name'],
                                   ipfs_data['model_ipfs_hash'],desc, url, json_str, ipfs_data['encoding'],
                                   ipfs_data['service_type'], ipfs_data['mpe_address'], pm_exp_th, dt.utcnow()]

        qry_res = conn.execute(upsrt_srvc_mdata, upsrt_srvc_mdata_params)
        print('_create_or_updt_srvc_mdata::row upserted', qry_res)

    def _create_grp(self, srvc_rw_id, org_id, service_id, grp_data, conn):
        insrt_grp = "INSERT INTO service_group (service_row_id, org_id, service_id, group_id, group_name, " \
                    "payment_address, row_updated, row_created)" \
                    "VALUES(%s, %s, %s, %s, %s, %s, %s, %s)"
        insrt_grp_params = [srvc_rw_id, org_id, service_id, grp_data['group_id'], grp_data['group_name'],
                            grp_data['payment_address'], dt.utcnow(), dt.utcnow()]

        return conn.execute(insrt_grp, insrt_grp_params)

    def _create_edpts(self, srvc_rw_id, org_id, service_id, endpt_data, conn):
        insrt_endpt = "INSERT INTO service_endpoint (service_row_id, org_id, service_id, group_id, endpoint, " \
                      "row_created, row_updated) " \
                      "VALUES(%s, %s, %s, %s, %s, %s, %s)"
        insrt_endpt_params = [srvc_rw_id, org_id, service_id, endpt_data['group_id'], endpt_data['endpoint'],
                              dt.utcnow(), dt.utcnow()]
        return conn.execute(insrt_endpt, insrt_endpt_params)

    def _create_tags(self, srvc_rw_id, org_id, service_id, tag_name, conn):
        insrt_tag = "INSERT INTO service_tags (service_row_id, org_id, service_id, tag_name, row_created, row_updated) " \
                    "VALUES(%s, %s, %s, %s, %s, %s) " \
                    "ON DUPLICATE KEY UPDATE tag_name = %s, row_updated = %s "
        insrt_tag_params = [srvc_rw_id, org_id, service_id, tag_name, dt.utcnow(), dt.utcnow(), tag_name, dt.utcnow()]
        qry_res = conn.execute(insrt_tag, insrt_tag_params)
        print('_create_tags::qry_res: ', qry_res)

    def _updt_raw_evts(self, row_id, type, err_cd, err_msg, conn):
        try:
            if type == 'REG':
                updt_evts = 'UPDATE registry_events_raw SET processed = 1, error_code = %s, error_msg = %s WHERE row_id = %s '
            elif type == 'MPE':
                updt_evts = 'UPDATE mpe_events_raw SET processed = 1, error_code = %s, error_msg = %s WHERE row_id = %s '
            updt_evts_resp = self.repo.execute(updt_evts, [err_cd, err_msg, row_id])
            print('updt_raw_evts::row updated: ', updt_evts_resp, '|', type)
        except Exception as e:
            self.util_obj.report_slack(type=1, slack_msg=repr(e))
            print('Error in updt_reg_evts_raw::error: ', e)

    def updt_raw_evts(self, row_id, type, err_cd, err_msg):
        conn = self.repo
        self._updt_raw_evts(row_id, type, err_cd, err_msg, conn)

    def del_org(self, org_id):
        self.repo.auto_commit = False
        conn = self.repo
        try:
            self._del_org(org_id=org_id, conn=conn)
            srvcs = self._get_srvcs(org_id=org_id)
            for rec in srvcs:
                self._del_srvc(org_id=org_id, service_id=rec['service_id'], conn=conn)
            self._commit(conn=conn)
        except Exception as e:
            self.util_obj.report_slack(type=1, slack_msg=repr(e))
            self._rollback(conn=conn, err=repr(e))

    def del_srvc(self, org_id, service_id):
        self._del_srvc(org_id=org_id, service_id=service_id, conn=self.repo)

    def create_channel(self, q_dta):
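        # events deliver groupId as a "0x"-prefixed hex string, while mpe_channel stores it
        # base64-encoded; e.g. (hypothetical value) "0xbcb0..." becomes
        # base64.b64encode(bytes.fromhex("bcb0...")).decode('utf8')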
        if q_dta['groupId'][0:2] == '0x':
            q_dta['groupId'] = q_dta['groupId'][2:]
        q_dta['groupId'] = base64.b64encode(bytes.fromhex(q_dta['groupId'])).decode('utf8')
        self._create_channel(q_dta, self.repo)

    def update_channel(self, channel_id, group_id, channel_data):
        print('update_channel::channel_id: ', channel_id)
        self._create_channel(q_dta={
            'sender': channel_data[1],
            'recipient': channel_data[3],
            'nonce': int(channel_data[0]),
            'expiration': channel_data[6],
            'signer': channel_data[2],
            'groupId': group_id,
            'channelId': channel_id,
            'amount': channel_data[5]
        }, conn=self.repo)

    def process_srvc_data(self, org_id, service_id, ipfs_hash, ipfs_data, tags_data):
        self.repo.auto_commit = False
        conn = self.repo
        try:
            self._del_srvc_dpndts(org_id=org_id, service_id=service_id, conn=conn)
            qry_data = self._create_or_updt_srvc(org_id=org_id, service_id=service_id, ipfs_hash=ipfs_hash, conn=conn)
            service_row_id = qry_data['last_row_id']
            print('service_row_id == ', service_row_id)
            self._create_or_updt_srvc_mdata(srvc_rw_id=service_row_id, org_id=org_id, service_id=service_id,
                                            ipfs_data=ipfs_data, conn=conn)
            grps = ipfs_data.get('groups', [])
            cnt = 0
            grp_name_id_dict = {}
            for grp in grps:
                grp_name_id_dict[grp['group_name']] = grp['group_id']
                qry_data = self._create_grp(srvc_rw_id=service_row_id, org_id=org_id, service_id=service_id, conn=conn,
                                            grp_data={
                                                'group_id': grp['group_id'],
                                                'group_name': grp['group_name'],
                                                'payment_address': grp['payment_address']
                                            })
                cnt = cnt + qry_data[0]
            print('rows insert in grp: ', cnt)

            endpts = ipfs_data.get('endpoints', [])
            cnt = 0
            for endpt in endpts:
                qry_data = self._create_edpts(srvc_rw_id=service_row_id, org_id=org_id, service_id=service_id,
                                              conn=conn,
                                              endpt_data={
                                                  'endpoint': endpt['endpoint'],
                                                  'group_id': grp_name_id_dict[endpt['group_name']]
                                              })
                cnt = cnt + qry_data[0]
            print('rows insert in endpt: ', cnt)

            if (tags_data is not None and tags_data[0]):
                tags = tags_data[3]
                for tag in tags:
                    tag = tag.decode('utf-8')
                    tag = tag.rstrip("\u0000")
                    self._create_tags(srvc_rw_id=service_row_id, org_id=org_id, service_id=service_id, tag_name=tag,
                                      conn=conn)
            self._commit(conn=conn)
        except Exception as e:
            self.util_obj.report_slack(type=1, slack_msg=repr(e))
            self._rollback(conn=conn, err=repr(e))

    def process_org_data(self, org_id, org_data):
        self.repo.auto_commit = False
        conn = self.repo
        try:

            if (org_data is not None and org_data[0]):
                self._create_or_updt_org(org_id, org_data, conn)
                self._del_members(org_id=org_id, conn=conn)
                self._create_or_updt_members(org_id, org_data[4], conn)
                self._commit(conn)
        except Exception as e:
            self.util_obj.report_slack(type=1, slack_msg=repr(e))
            self._rollback(conn=conn, err=repr(e))

    def update_tags(self, org_id, service_id, tags_data):
        self.repo.auto_commit = False
        conn = self.repo
        try:
            self._del_tags(org_id=org_id, service_id=service_id, conn=conn)
            if (tags_data is not None and tags_data[0]):
                tags = tags_data[3]
                srvc_data = self._get_srvc_row_id(service_id=service_id, org_id=org_id)
                srvc_rw_id = srvc_data[0]['row_id']
                for tag in tags:
                    tag = tag.decode('utf-8')
                    tag = tag.rstrip("\u0000")
                    self._create_tags(srvc_rw_id=srvc_rw_id, org_id=org_id, service_id=service_id, tag_name=tag,
                                      conn=conn)
                self._commit(conn)
        except Exception as e:
            self.util_obj.report_slack(type=1, slack_msg=repr(e))
            self._rollback(conn=conn, err=repr(e))

    #
    def _commit(self, conn):
        conn.auto_commit = True
        conn.connection.commit()
        print('_commit')
        print(conn.connection)

    def _rollback(self, conn, err):
        print('_rollback ::error: ', err)
        conn.auto_commit = True
        conn.connection.rollback()
Example #9
class UserRepository:
    def __init__(self):
        self._repo = Repository(net_id=NETWORK_ID, NETWORKS=NETWORKS)

    def enable_preference(self, user_preference, user_row_id):
        enable_preference_query = \
            "INSERT INTO user_preference (user_row_id, preference_type, communication_type, source, status, " \
            "created_on, updated_on) " \
            "VALUES(%s, %s, %s, %s, %s, %s, %s) " \
            "ON DUPLICATE KEY UPDATE status = %s, updated_on = %s"
        query_response = self._repo.execute(enable_preference_query, [
            user_row_id, user_preference.preference_type,
            user_preference.communication_type, user_preference.source,
            user_preference.status,
            dt.utcnow(),
            dt.utcnow(), user_preference.status,
            dt.utcnow()
        ])
        return query_response

    def get_user_data_for_given_username(self, username):
        get_user_data_query = \
            "SELECT row_id, username, account_id, name, email, email_verified, email_alerts, status, request_id, " \
            "request_time_epoch, is_terms_accepted FROM user WHERE username = %s LIMIT 1"
        user_data = self._repo.execute(get_user_data_query, username)
        return user_data

    def disable_preference(self, user_preference, user_row_id):
        disable_preference_query = \
            "UPDATE user_preference SET status = %s, opt_out_reason = %s WHERE user_row_id = %s " \
            "AND preference_type = %s AND communication_type = %s AND source = %s"
        query_response = self._repo.execute(disable_preference_query, [
            user_preference.status, user_preference.opt_out_reason,
            user_row_id, user_preference.preference_type,
            user_preference.communication_type, user_preference.source
        ])
        return query_response

    def get_user_preferences(self, user_row_id):
        get_user_preference = \
            "SELECT status, preference_type, communication_type, source, opt_out_reason FROM user_preference WHERE user_row_id = %s "
        query_response = self._repo.execute(get_user_preference, [user_row_id])
        return query_response

    def delete_user(self, username):
        query = "DELETE FROM user WHERE username = %s "
        self._repo.execute(query, [username])

    def register_user_data(self, user):
        """ register user data """
        user_data = self.get_user_data_for_given_username(
            username=user.username)
        if bool(user_data):
            raise UserAlreadyExistException()
        query_parameters = [
            user.email, "", user.origin, user.name, user.email,
            user.email_verified, user.email_verified, "", "",
            dt.utcnow(),
            dt.utcnow()
        ]
        query_response = self._repo.execute(
            "INSERT INTO user (username, account_id, origin, name, email, email_verified, status, request_id, "
            "request_time_epoch, row_created, row_updated) "
            "VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)",
            query_parameters)
        return "SUCCESS"
class TestMonitorService(TestCase):
    def setUp(self):
        self.repo = Repository(net_id=NETWORK_ID, NETWORKS=NETWORKS)

    # @patch("service_status.monitor_service.MonitorServiceCertificate._get_service_endpoint_data")
    # @patch(
    #     "service_status.monitor_service.MonitorServiceCertificate._get_certification_expiration_date_for_given_service")
    # @patch("service_status.monitor_service.MonitorServiceCertificate._get_service_provider_email")
    # @patch("common.utils.Utils.report_slack")
    # @patch("service_status.monitor_service.MonitorServiceCertificate._send_email_notification")
    # def test_monitor_service_certificate_expiry_handler(
    #         self, _send_email_notification, report_slack, _get_service_provider_email,
    #         _get_certification_expiration_date_for_given_service,
    #         _get_service_endpoint_data):
    #     _get_service_endpoint_data.return_value = [
    #         {"endpoint": "https://dummy.com:9999", "org_id": "test_org_id", "service_id": "test_service"}]
    #     _get_certification_expiration_date_for_given_service.return_value = dt.utcnow() + timedelta(days=25)
    #     _get_service_provider_email.return_value = ["*****@*****.**"]
    #     report_slack.return_value = None
    #     _send_email_notification.return_value = None
    #     response = monitor_service_certificates_expiry_handler(event={}, context=None)
    #     assert (response == "success")

    # def test_reset_service_health_failed_status_count(self):
    #     self.tearDown()
    #     created_at = dt.utcnow() - timedelta(days=25)
    #     updated_at = dt.utcnow() - timedelta(hours=6)
    #     last_check_timestamp = dt.utcnow() - timedelta(hours=6)
    #     next_check_timestamp = dt.utcnow() + timedelta(hours=6)
    #     current_timestamp = dt.utcnow()
    #     org_id = "test_org_id"
    #     service_id = "test_service_id"
    #     self.repo.execute(
    #         "INSERT INTO service (row_id, org_id, service_id, service_path, ipfs_hash, is_curated, row_created, row_updated)"
    #         "VALUES(1, %s, %s, '', '', 0, %s, %s)",
    #         [org_id, service_id, created_at, updated_at]
    #     )
    #     self.repo.execute(
    #         "INSERT INTO service_endpoint (service_row_id, org_id, service_id, group_id, endpoint, is_available, "
    #         "last_check_timestamp, next_check_timestamp, failed_status_count, row_created, row_updated)"
    #         "VALUES(1, %s, %s, 'test_group_id', 'https://dummy.io', 0, %s, %s, 10, %s, %s)",
    #         [org_id, service_id, last_check_timestamp, next_check_timestamp, created_at, updated_at]
    #     )
    #     event = {
    #         "httpMethod": "GET",
    #         "pathParameters": {
    #             "org_id": org_id,
    #             "service_id": service_id}
    #     }
    #     response = reset_service_health_next_check_time(event=event, context=None)
    #     db_response = self.repo.execute(
    #         "SELECT next_check_timestamp FROM service_endpoint WHERE org_id = %s AND service_id = %s",
    #         [org_id, service_id])
    #     date_diff = (db_response[0]["next_check_timestamp"] - current_timestamp)
    #     assert ((date_diff.microseconds / 1000000) <= 2)
    #     assert (response["statusCode"] == 200)
    #     response_body = json.loads(response["body"])
    #     assert (response_body["data"] == "We will trigger a health check immediately.")

    def test_get_service_status(self, secure=True):
        # only the second URL is exercised; the first is kept as an alternative secure endpoint
        # url = "https://bh.singularitynet.io:7252"
        url = "138.197.215.173:5001"
        try:
            if secure:
                # `certificate` (the root certificate bytes) is expected to be defined at module level
                channel = grpc.secure_channel(
                    url,
                    grpc.ssl_channel_credentials(
                        root_certificates=certificate))
            else:
                channel = grpc.insecure_channel(url)

            stub = heartb_pb2_grpc.HealthStub(channel)
            response = stub.Check(heartb_pb2.HealthCheckRequest(service=""),
                                  timeout=10)
            if response is not None and response.status == 1:
                # logger.info(response.status)
                return 1
            return 0
        except Exception as e:
            # logger.info(f"error in making grpc call::url: {url}, |error: {e}")
            return 0

    def tearDown(self):
        self.repo.execute("DELETE FROM service")
        self.repo.execute("DELETE FROM service_endpoint")
Example #11
class TestWalletService(unittest.TestCase):
    def setUp(self):
        self.NETWORKS_NAME = dict(
            (NETWORKS[netId]["name"], netId) for netId in NETWORKS.keys())
        self.repo = Repository(net_id=NETWORK_ID, NETWORKS=NETWORKS)
        self.wallet_service = WalletService(repo=self.repo)

    @patch("common.utils.Utils.report_slack")
    @patch("common.blockchain_util.BlockChainUtil.create_account")
    def test_create_wallet(self, mock_create_account, mock_report_slack):
        mock_create_account.return_value = (
            "323449587122651441342932061624154600879572532581",
            "26561428888193216265620544717131876925191237116680314981303971688115990928499",
        )
        response = self.wallet_service.create_and_register_wallet(
            username="******")
        self.assertDictEqual(
            response, {
                "address": "323449587122651441342932061624154600879572532581",
                "private_key":
                "26561428888193216265620544717131876925191237116680314981303971688115990928499",
                "status": 0,
                "type": "GENERAL"
            })

    @patch("common.utils.Utils.report_slack")
    @patch("wallets.dao.wallet_data_access_object.WalletDAO.get_wallet_details"
           )
    @patch("wallets.dao.wallet_data_access_object.WalletDAO.insert_wallet")
    @patch(
        "wallets.dao.wallet_data_access_object.WalletDAO.add_user_for_wallet")
    def test_register_wallet(self, mock_add_user_for_wallet,
                             mock_insert_wallet, mock_get_wallet_details,
                             mock_report_slack):
        """
            insert new wallet for user
        """
        mock_get_wallet_details.return_value = []
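        # no wallet rows exist for the user yet, so register_wallet should insert a new one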
        address = "323449587122651441342932061624154600879572532581"
        type = "GENERAL"
        status = 0
        username = "******"
        response = self.wallet_service.register_wallet(username, address, type,
                                                       status)
        assert response
        # registering the same wallet a second time for the user is expected to raise
        self.assertRaises(Exception, self.wallet_service.register_wallet,
                          username, address, type, status)

    def test_remove_user_wallet(self):
        wallet_dao = WalletDAO(self.repo)
        username = "******"
        wallet = Wallet(address="32344958712265144", type="GENERAL", status=0)
        wallet_dao.insert_wallet(wallet)
        wallet_dao.add_user_for_wallet(wallet, username)
        self.wallet_service.remove_user_wallet(username)
        wallet_details = wallet_dao.get_wallet_data_by_username(username)
        if len(wallet_details) == 0:
            assert True
        else:
            assert False

    def tearDown(self):
        self.repo.execute("DELETE FROM wallet")
        self.repo.execute("DELETE FROM user_wallet")
Example #12
class TestUserSignUp(TestCase):
    def setUp(self):
        self.repo = Repository(net_id=NETWORK_ID, NETWORKS=NETWORKS)

    def test_user_signup(self):
        self.tearDown()
        event = {
            "resource": "/signup",
            "path": "/dapp-user/signup",
            "httpMethod": "GET",
            "headers": {
                "Authorization": "",
                "Host": "ropsten-marketplace.singularitynet.io",
                "origin": "https://beta.singularitynet.io",
                "referer": "https://beta.singularitynet.io/aimarketplace",
                "sec-fetch-mode": "cors",
                "sec-fetch-site": "same-site"
            },
            "multiValueHeaders": {},
            "queryStringParameters": {
                "origin": "PUBLISHER_DAPP"
            },
            "multiValueQueryStringParameters": None,
            "pathParameters": None,
            "stageVariables": None,
            "requestContext": {
                "resourceId": "kduff6",
                "authorizer": {
                    "claims": {
                        "sub": "00af3833-47ce-4b3c-b2f9-af09440dac30",
                        "aud": "2ddk0m23u0ovju2fst34q0t96j",
                        "email_verified": "true",
                        "event_id": "7bb22434-626d-483d-8519-7275d61fdbae",
                        "token_use": "id",
                        "auth_time": "1583996248",
                        "iss":
                        "https://cognito-idp.us-east-1.amazonaws.com/us-east-1_fVrm4ARvK",
                        "nickname": "Ankit",
                        "cognito:username":
                        "******",
                        "exp": "Thu Mar 12 07:57:31 UTC 2020",
                        "iat": "Thu Mar 12 06:57:31 UTC 2020",
                        "email": "*****@*****.**"
                    }
                },
                "resourcePath": "/signup",
                "httpMethod": "GET",
                "extendedRequestId": "JQ-GlEIfIAMF9xw=",
                "requestTime": "12/Mar/2020:06:57:33 +0000",
                "path": "/dapp-user/signup",
                "accountId": "533793137436",
                "protocol": "HTTP/1.1",
                "stage": "mainnet",
                "domainPrefix": "mainnet-marketplace",
                "requestTimeEpoch": 1583996253283,
                "requestId": "0c543e73-343a-4d6f-9cbf-5ad1d5de6ec7",
                "identity": {
                    "cognitoIdentityPoolId": None,
                    "accountId": None,
                    "cognitoIdentityId": None,
                    "caller": None,
                    "sourceIp": "111.93.235.50",
                    "principalOrgId": None,
                    "accessKey": None,
                    "cognitoAuthenticationType": None,
                    "cognitoAuthenticationProvider": None,
                    "userArn": None,
                    "userAgent":
                    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.97 Safari/537.36",
                    "user": None
                },
                "domainName": "ropsten-marketplace.singularitynet.io",
                "apiId": "6nfct1u0yf"
            },
            "isBase64Encoded": False
        }
        response = request_handler(event=event, context=None)
        assert (response["statusCode"] == 200)
        response_body = json.loads(response["body"])
        assert (response_body["status"] == "success")
        record_count = self.repo.execute("SELECT * FROM user")
        assert (len(record_count) == 1)

    def tearDown(self):
        self.repo.execute("DELETE FROM user")
class TestMonitorService(TestCase):
    def setUp(self):
        self.repo = Repository(net_id=NETWORK_ID, NETWORKS=NETWORKS)

    @patch(
        "service_status.monitor_service.MonitorServiceCertificate._get_service_endpoint_data"
    )
    @patch(
        "service_status.monitor_service.MonitorServiceCertificate._get_certification_expiration_date_for_given_service"
    )
    @patch(
        "service_status.monitor_service.MonitorServiceCertificate._get_service_provider_email"
    )
    @patch("common.utils.Utils.report_slack")
    @patch(
        "service_status.monitor_service.MonitorServiceCertificate._send_email_notification"
    )
    def test_monitor_service_certificate_expiry_handler(
            self, _send_email_notification, report_slack,
            _get_service_provider_email,
            _get_certification_expiration_date_for_given_service,
            _get_service_endpoint_data):
        _get_service_endpoint_data.return_value = [{
            "endpoint": "https://dummy.com:9999",
            "org_id": "test_org_id",
            "service_id": "test_service"
        }]
        _get_certification_expiration_date_for_given_service.return_value = \
            dt.utcnow() + timedelta(days=25)
        _get_service_provider_email.return_value = [
            "*****@*****.**"
        ]
        report_slack.return_value = None
        _send_email_notification.return_value = None
        response = monitor_service_certificates_expiry_handler(event={},
                                                               context=None)
        assert (response == "success")

    def test_reset_service_health_failed_status_count(self):
        self.tearDown()
        created_at = dt.utcnow() - timedelta(days=25)
        updated_at = dt.utcnow() - timedelta(hours=6)
        last_check_timestamp = dt.utcnow() - timedelta(hours=6)
        next_check_timestamp = dt.utcnow() + timedelta(hours=6)
        current_timestamp = dt.utcnow()
        org_id = "test_org_id"
        service_id = "test_service_id"
        self.repo.execute(
            "INSERT INTO service (row_id, org_id, service_id, service_path, ipfs_hash, is_curated, row_created, row_updated)"
            "VALUES(1, %s, %s, '', '', 0, %s, %s)",
            [org_id, service_id, created_at, updated_at])
        self.repo.execute(
            "INSERT INTO service_endpoint (service_row_id, org_id, service_id, group_id, endpoint, is_available, "
            "last_check_timestamp, next_check_timestamp, failed_status_count, row_created, row_updated)"
            "VALUES(1, %s, %s, 'test_group_id', 'https://dummy.io', 0, %s, %s, 10, %s, %s)",
            [
                org_id, service_id, last_check_timestamp, next_check_timestamp,
                created_at, updated_at
            ])
        event = {
            "httpMethod": "GET",
            "pathParameters": {
                "org_id": org_id,
                "service_id": service_id
            }
        }
        response = reset_service_health_next_check_time(event=event,
                                                        context=None)
        db_response = self.repo.execute(
            "SELECT next_check_timestamp FROM service_endpoint WHERE org_id = %s AND service_id = %s",
            [org_id, service_id])
        date_diff = (db_response[0]["next_check_timestamp"] -
                     current_timestamp)
        assert (date_diff.total_seconds() <= 2)
        assert (response["statusCode"] == 200)
        response_body = json.loads(response["body"])
        assert (response_body["data"] ==
                "We will trigger a health check immediately.")

    def tearDown(self):
        self.repo.execute("DELETE FROM service")
        self.repo.execute("DELETE FROM service_endpoint")
class HandleContractsDB:
    def __init__(self, err_obj, net_id):
        self.err_obj = err_obj
        self.repo = Repository(net_id, NETWORKS)
        self.util_obj = Utils()
        self.ipfs_utll = IPFSUtil(IPFS_URL['url'], IPFS_URL['port'])
        self.s3_util = S3Util(S3_BUCKET_ACCESS_KEY, S3_BUCKET_SECRET_KEY)
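        # the IPFS and S3 clients are used below to mirror service assets from IPFS into S3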

    # read operations
    def read_registry_events(self):
        query = 'select * from registry_events_raw where processed = 0 order by block_no asc limit ' + EVNTS_LIMIT
        evts_dta = self.repo.execute(query)
        print('read_registry_events::read_count: ', len(evts_dta))
        return evts_dta

    def read_mpe_events(self):
        query = 'select * from mpe_events_raw where processed = 0 order by block_no asc limit ' + EVNTS_LIMIT
        evts_dta = self.repo.execute(query)
        print('read_mpe_events::read_count: ', len(evts_dta))
        return evts_dta

    def _get_srvc_row_id(self, org_id, service_id):
        print('get_srvc_row_id::service_id: ', service_id)
        query = 'SELECT row_id FROM service WHERE service_id = %s AND org_id = %s '
        srvc_data = self.repo.execute(query, [service_id, org_id])
        print('get_srvc_row_id::srvc_data: ', srvc_data)
        return srvc_data

    def _get_srvcs(self, org_id):
        query = 'SELECT * FROM service WHERE org_id = %s '
        srvc_data = self.repo.execute(query, [org_id])
        print('_get_srvcs::srvc_data: ', srvc_data)
        return srvc_data

    # write operations
    def _create_or_updt_org(self, org_id, org_name, owner_address, org_metadata_uri, conn):
        upsert_qry = "Insert into organization (org_id, organization_name, owner_address, org_metadata_uri, row_updated, row_created) " \
                     "VALUES ( %s, %s, %s, %s, %s , %s) " \
                     "ON DUPLICATE KEY UPDATE organization_name = %s, owner_address = %s, org_metadata_uri = %s, row_updated = %s  "
        upsert_params = [org_id, org_name, owner_address, org_metadata_uri, dt.utcnow(), dt.utcnow(), org_name, owner_address, org_metadata_uri,
                         dt.utcnow()]
        print('upsert_qry: ', upsert_qry)
        qry_resp = conn.execute(upsert_qry, upsert_params)
        print('_create_or_updt_org::row upserted: ', qry_resp)

    def _del_org_groups(self, org_id, conn):
        delete_query = conn.execute(
            "DELETE FROM org_group WHERE org_id = %s ", [org_id])

    def _create_org_groups(self, org_id, groups, conn):
        insert_qry = "Insert into org_group (org_id, group_id, group_name, payment, row_updated, row_created) " \
                     "VALUES ( %s, %s, %s, %s, %s, %s ) "
        cnt = 0
        for group in groups:
            insert_params = [org_id, group['group_id'], group['group_name'], json.dumps(
                group['payment']), dt.utcnow(), dt.utcnow()]
            qry_res = conn.execute(insert_qry, insert_params)
            cnt = cnt + qry_res[0]
        print('_create_org_groups::row inserted', cnt)

    def _create_or_updt_members(self, org_id, members, conn):
        upsrt_members = "INSERT INTO members (org_id, member, row_created, row_updated)" \
                        "VALUES ( %s, %s, %s, %s )" \
                        "ON DUPLICATE KEY UPDATE row_updated = %s "
        cnt = 0
        for member in members:
            upsrt_members_params = [org_id, member,
                                    dt.utcnow(), dt.utcnow(), dt.utcnow()]
            qry_res = conn.execute(upsrt_members, upsrt_members_params)
            cnt = cnt + qry_res[0]
        print('create_or_updt_members::row upserted', cnt)

    def _create_channel(self, q_dta, conn):
        upsrt_mpe_chnl = "INSERT INTO mpe_channel (channel_id, sender, recipient, groupId, balance_in_cogs, pending, nonce, " \
                         "expiration, signer, row_created, row_updated) " \
                         "VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s) " \
                         "ON DUPLICATE KEY UPDATE balance_in_cogs = %s, pending = %s, nonce = %s, " \
                         "expiration = %s, row_updated = %s"
        upsrt_mpe_chnl_params = [q_dta['channelId'], q_dta['sender'], q_dta['recipient'], q_dta['groupId'],
                                 q_dta['amount'], 0.0, q_dta['nonce'], q_dta['expiration'], q_dta['signer'],
                                 dt.utcnow(), dt.utcnow(), q_dta['amount'], 0.0, q_dta['nonce'], q_dta['expiration'],
                                 dt.utcnow()]
        qry_res = conn.execute(upsrt_mpe_chnl, upsrt_mpe_chnl_params)
        print('_create_channel::row upserted', qry_res)

    def _del_srvc(self, org_id, service_id, conn):
        del_srvc = 'DELETE FROM service WHERE service_id = %s AND org_id = %s '
        qry_res = conn.execute(del_srvc, [service_id, org_id])
        print('_del_srvc::rows deleted: ', qry_res)

    def _del_org(self, org_id, conn):
        del_org = 'DELETE FROM organization WHERE org_id = %s '
        qry_res = conn.execute(del_org, org_id)
        print('_del_org::rows deleted: ', qry_res)

    def _del_members(self, org_id, conn):
        del_org = 'DELETE FROM members WHERE org_id = %s '
        qry_res = conn.execute(del_org, org_id)
        print('_del_members::rows deleted: ', qry_res)

    def _del_tags(self, org_id, service_id, conn):
        del_srvc_tags = 'DELETE FROM service_tags WHERE service_id = %s AND org_id = %s '
        del_srvc_tags_count = conn.execute(del_srvc_tags, [service_id, org_id])
        print('_del_tags::del_srvc_tags: ', del_srvc_tags_count)

    def _del_srvc_dpndts(self, org_id, service_id, conn):
        print("_del_srvc_dpndts::service_id: ",
              service_id, '|org_id: ', org_id)
        del_srvc_grps = 'DELETE FROM service_group WHERE service_id = %s AND org_id = %s '
        del_srvc_grps_count = conn.execute(del_srvc_grps, [service_id, org_id])

        del_srvc_endpts = 'DELETE FROM service_endpoint WHERE service_id = %s AND org_id = %s '
        del_srvc_endpts_count = conn.execute(
            del_srvc_endpts, [service_id, org_id])

        self._del_tags(org_id=org_id, service_id=service_id, conn=conn)
        print('_del_srvc_dpndts::del_srvc_grps: ', del_srvc_grps_count,
              '|del_srvc_endpts: ', del_srvc_endpts_count)

    def _create_or_updt_srvc(self, org_id, service_id, ipfs_hash, conn):
        upsrt_srvc = "INSERT INTO service (org_id, service_id, is_curated, ipfs_hash, row_created, row_updated) " \
                     "VALUES (%s, %s, %s, %s, %s, %s) " \
                     "ON DUPLICATE KEY UPDATE ipfs_hash = %s, row_updated = %s "
        upsrt_srvc_params = [org_id, service_id, 0, ipfs_hash,
                             dt.utcnow(), dt.utcnow(), ipfs_hash, dt.utcnow()]
        qry_res = conn.execute(upsrt_srvc, upsrt_srvc_params)
        print('_create_or_updt_srvc::row upserted', qry_res)
        return qry_res[len(qry_res) - 1]

    def _create_or_updt_srvc_mdata(self, srvc_rw_id, org_id, service_id, ipfs_data, assets_url, conn):
        upsrt_srvc_mdata = "INSERT INTO service_metadata (service_row_id, org_id, service_id, " \
                           "display_name, model_ipfs_hash, description, url, json, encoding, type, " \
                           "mpe_address, assets_hash , assets_url, service_rating, row_updated, row_created) " \
                           "VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s ) " \
                           "ON DUPLICATE KEY UPDATE service_row_id = %s, " \
                           "display_name = %s, model_ipfs_hash = %s, description = %s, url = %s, json = %s, " \
                           "encoding = %s, type = %s, mpe_address = %s, row_updated = %s ,assets_hash = %s ,assets_url = %s"

        srvc_desc = ipfs_data.get('service_description', {})
        desc = srvc_desc.get('description', '')
        url = srvc_desc.get('url', '')
        json_str = ipfs_data.get('json', '')
        assets_hash = json.dumps(ipfs_data.get('assets', {}))
        assets_url_str = json.dumps(assets_url)
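        # asset hashes and their resolved S3 URLs are persisted as JSON strings in service_metadata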
        upsrt_srvc_mdata_params = [srvc_rw_id, org_id, service_id, ipfs_data['display_name'],
                                   ipfs_data['model_ipfs_hash'], desc, url, json_str, ipfs_data['encoding'],
                                   ipfs_data['service_type'], ipfs_data['mpe_address'], assets_hash, assets_url_str,
                                   '{"rating": 0.0 , "total_users_rated": 0 }', dt.utcnow(), dt.utcnow(),
                                   srvc_rw_id, ipfs_data['display_name'],
                                   ipfs_data['model_ipfs_hash'], desc, url, json_str, ipfs_data['encoding'],
                                   ipfs_data['service_type'], ipfs_data['mpe_address'], dt.utcnow(), assets_hash,
                                   assets_url_str]

        qry_res = conn.execute(upsrt_srvc_mdata, upsrt_srvc_mdata_params)
        print('_create_or_updt_srvc_mdata::row upserted', qry_res)

    def _create_grp(self, srvc_rw_id, org_id, service_id, grp_data, conn):
        insrt_grp = "INSERT INTO service_group (service_row_id, org_id, service_id, group_id, group_name," \
                    "pricing, row_updated, row_created)" \
                    "VALUES(%s, %s, %s, %s, %s, %s, %s, %s)"
        insrt_grp_params = [srvc_rw_id, org_id, service_id, grp_data['group_id'], grp_data['group_name'],
                            grp_data['pricing'], dt.utcnow(), dt.utcnow()]

        return conn.execute(insrt_grp, insrt_grp_params)

    def _create_edpts(self, srvc_rw_id, org_id, service_id, endpt_data, conn):
        insrt_endpt = "INSERT INTO service_endpoint (service_row_id, org_id, service_id, group_id, endpoint, " \
                      "row_created, row_updated) " \
                      "VALUES(%s, %s, %s, %s, %s, %s, %s)"
        insrt_endpt_params = [srvc_rw_id, org_id, service_id, endpt_data['group_id'], endpt_data['endpoint'],
                              dt.utcnow(), dt.utcnow()]
        return conn.execute(insrt_endpt, insrt_endpt_params)

    def _create_tags(self, srvc_rw_id, org_id, service_id, tag_name, conn):
        insrt_tag = "INSERT INTO service_tags (service_row_id, org_id, service_id, tag_name, row_created, row_updated) " \
                    "VALUES(%s, %s, %s, %s, %s, %s) " \
                    "ON DUPLICATE KEY UPDATE tag_name = %s, row_updated = %s "
        insrt_tag_params = [srvc_rw_id, org_id, service_id,
                            tag_name, dt.utcnow(), dt.utcnow(), tag_name, dt.utcnow()]
        qry_res = conn.execute(insrt_tag, insrt_tag_params)
        print('_create_tags::qry_res: ', qry_res)

    def _updt_raw_evts(self, row_id, type, err_cd, err_msg, conn):
        try:
            if type == 'REG':
                updt_evts = 'UPDATE registry_events_raw SET processed = 1, error_code = %s, error_msg = %s WHERE row_id = %s '
            elif type == 'MPE':
                updt_evts = 'UPDATE mpe_events_raw SET processed = 1, error_code = %s, error_msg = %s WHERE row_id = %s '
            updt_evts_resp = self.repo.execute(
                updt_evts, [err_cd, err_msg, row_id])
            print('updt_raw_evts::row updated: ', updt_evts_resp, '|', type)
        except Exception as e:
            self.util_obj.report_slack(
                type=1, slack_msg=repr(e), SLACK_HOOK=SLACK_HOOK)
            print('Error in updt_reg_evts_raw::error: ', e)

    def updt_raw_evts(self, row_id, type, err_cd, err_msg):
        conn = self.repo
        self._updt_raw_evts(row_id, type, err_cd, err_msg, conn)

    def del_org(self, org_id):
        self.repo.auto_commit = False
        conn = self.repo
        try:
            self._del_org(org_id=org_id, conn=conn)
            self._del_org_groups(org_id=org_id, conn=conn)
            srvcs = self._get_srvcs(org_id=org_id)
            for rec in srvcs:
                self._del_srvc(
                    org_id=org_id, service_id=rec['service_id'], conn=conn)
            self._commit(conn=conn)
        except Exception as e:
            self.util_obj.report_slack(
                type=1, slack_msg=repr(e), SLACK_HOOK=SLACK_HOOK)
            self._rollback(conn=conn, err=repr(e))

    def del_srvc(self, org_id, service_id):
        self._del_srvc(org_id=org_id, service_id=service_id, conn=self.repo)

    def create_channel(self, q_dta):
        if q_dta['groupId'][0:2] == '0x':
            q_dta['groupId'] = q_dta['groupId'][2:]
        q_dta['groupId'] = base64.b64encode(
            bytes.fromhex(q_dta['groupId'])).decode('utf8')
        self._create_channel(q_dta, self.repo)

    def update_channel(self, channel_id, group_id, channel_data):
        print('update_channel::channel_id: ', channel_id)
        self._create_channel(q_dta={
            'sender': channel_data[1],
            'recipient': channel_data[3],
            'nonce': int(channel_data[0]),
            'expiration': channel_data[6],
            'signer': channel_data[2],
            'groupId': group_id,
            'channelId': channel_id,
            'amount': channel_data[5]
        }, conn=self.repo)

    def _push_asset_to_s3_using_hash(self, ipfs_hash, org_id, service_id):
        io_bytes = self.ipfs_utll.read_bytesio_from_ipfs(ipfs_hash)
        filename = ipfs_hash.split("/")[1]
        new_url = self.s3_util.push_io_bytes_to_s3(
            ASSETS_PREFIX + "/" + org_id + "/" + service_id + "/" + filename,
            ASSETS_BUCKET_NAME, io_bytes)
        return new_url

    def _comapre_assets_and_push_to_s3(self, existing_assets_hash, new_assets_hash, existing_assets_url, org_id,
                                       service_id):
        """

        :param existing_assets_hash: contains asset_type and its has value stored in ipfs
        :param new_assets_hash:  contains asset type and its updated hash value in ipfs
        :param existing_assets_url:  contains asset type and s3_url value for given asset_type
        :param org_id:
        :param service_id:
        :return: dict of asset_type and new_s3_url
        """
        # this function compare assets and deletes and update the new assets

        assets_url_mapping = {}

        if not existing_assets_hash:
            existing_assets_hash = {}
        if not existing_assets_url:
            existing_assets_url = {}
        if not new_assets_hash:
            new_assets_hash = {}

        for new_asset_type, new_asset_hash in new_assets_hash.items():

            if isinstance(new_asset_hash, list):
                # if this asset_type holds a list of assets, remove every existing
                # asset of that type from s3 and push all the new assets to s3
                new_urls_list = []

                # remove all existing assets if they exist
                if new_asset_type in existing_assets_url:
                    for url in existing_assets_url[new_asset_type]:
                        self.s3_util.delete_file_from_s3(url)

                # push the new files to s3 and collect their urls
                for asset_hash in new_assets_hash[new_asset_type]:
                    new_urls_list.append(
                        self._push_asset_to_s3_using_hash(asset_hash, org_id, service_id))

                assets_url_mapping[new_asset_type] = new_urls_list

            elif isinstance(new_asset_hash, str):
                # this asset_type holds a single value
                if new_asset_type in existing_assets_hash and existing_assets_hash[new_asset_type] == new_asset_hash:
                    # file is not updated, reuse the existing url
                    assets_url_mapping[new_asset_type] = existing_assets_url[new_asset_type]

                else:
                    if new_asset_type in existing_assets_url:
                        url_of_file_to_be_removed = existing_assets_url[new_asset_type]
                        self.s3_util.delete_file_from_s3(
                            url_of_file_to_be_removed)

                    hash_of_file_to_be_pushed_to_s3 = new_assets_hash[new_asset_type]

                    assets_url_mapping[new_asset_type] = self._push_asset_to_s3_using_hash(
                        hash_of_file_to_be_pushed_to_s3, org_id, service_id)

            else:
                logger.info(
                    "unknown asset type for org_id %s service_id %s", org_id, service_id)

        return assets_url_mapping
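
    # A small, hypothetical walk-through of the compare-and-push logic above
    # (all hashes and URLs are illustrative, not taken from a real service):
    #
    #   existing_assets_hash = {"hero_image": "QmOldHash/hero.png"}
    #   existing_assets_url  = {"hero_image": "https://<assets-bucket>/org/svc/hero.png"}
    #   new_assets_hash      = {"hero_image": "QmOldHash/hero.png",
    #                           "demo_files": ["QmNewHash/demo.zip"]}
    #
    #   hero_image's hash is unchanged, so its existing S3 URL is reused as-is.
    #   demo_files is a list, so any previously stored URLs for that type are
    #   deleted from S3 and every new hash is pushed, yielding a list of URLs.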

    def _get_new_assets_url(self, org_id, service_id, new_ipfs_data):
        new_assets_hash = new_ipfs_data.get('assets', {})
        existing_assets_hash = {}
        existing_assets_url = {}

        service_metadata_repo = ServiceMetadataRepository()
        existing_service_metadata = service_metadata_repo.get_service_metatdata_by_servcie_id_and_org_id(
            service_id, org_id)

        if existing_service_metadata:
            existing_assets_hash = existing_service_metadata.assets_hash
            existing_assets_url = existing_service_metadata.assets_url
        assets_url_mapping = self._compare_assets_and_push_to_s3(existing_assets_hash, new_assets_hash, existing_assets_url, org_id,
                                                                 service_id)
        return assets_url_mapping

    def process_srvc_data(self, org_id, service_id, ipfs_hash, ipfs_data, tags_data):
        self.repo.auto_commit = False
        conn = self.repo
        try:

            assets_url = self._get_new_assets_url(
                org_id, service_id, ipfs_data)

            self._del_srvc_dpndts(
                org_id=org_id, service_id=service_id, conn=conn)
            qry_data = self._create_or_updt_srvc(
                org_id=org_id, service_id=service_id, ipfs_hash=ipfs_hash, conn=conn)
            service_row_id = qry_data['last_row_id']
            print('service_row_id == ', service_row_id)
            self._create_or_updt_srvc_mdata(srvc_rw_id=service_row_id, org_id=org_id, service_id=service_id,
                                            ipfs_data=ipfs_data, assets_url=assets_url, conn=conn)
            grps = ipfs_data.get('groups', [])
            group_insert_count = 0
            for grp in grps:
                qry_data = self._create_grp(srvc_rw_id=service_row_id, org_id=org_id, service_id=service_id, conn=conn,
                                            grp_data={
                                                'group_id': grp['group_id'],
                                                'group_name': grp['group_name'],
                                                'pricing': json.dumps(grp['pricing'])
                                            })
                group_insert_count = group_insert_count + qry_data[0]
                endpts = grp.get('endpoints', [])
                endpt_insert_count = 0
                for endpt in endpts:
                    qry_data = self._create_edpts(srvc_rw_id=service_row_id, org_id=org_id, service_id=service_id,
                                                  conn=conn,
                                                  endpt_data={
                                                      'endpoint': endpt,
                                                      'group_id': grp['group_id'],
                                                  })
                    endpt_insert_count = endpt_insert_count + qry_data[0]
                print('rows inserted for endpoints: ', endpt_insert_count)
            print('rows inserted for groups: ', group_insert_count)

            if (tags_data is not None and tags_data[0]):
                tags = tags_data[3]
                for tag in tags:
                    tag = tag.decode('utf-8')
                    tag = tag.rstrip("\u0000")
                    self._create_tags(srvc_rw_id=service_row_id, org_id=org_id, service_id=service_id, tag_name=tag,
                                      conn=conn)
            self._commit(conn=conn)

        except Exception as e:
            self.util_obj.report_slack(
                type=1, slack_msg=repr(e), SLACK_HOOK=SLACK_HOOK)
            self._rollback(conn=conn, err=repr(e))
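
    # A minimal, hypothetical shape of the inputs process_srvc_data expects,
    # inferred from the fields read above (all values are illustrative only):
    #
    #   ipfs_data = {
    #       "assets": {"hero_image": "QmSomeHash/hero.png"},
    #       "groups": [{
    #           "group_id": "c2FtcGxlLWdyb3VwLWlk",
    #           "group_name": "default_group",
    #           "pricing": [{"price_model": "fixed_price", "price_in_cogs": 1}],
    #           "endpoints": ["https://node.example.io:8088"],
    #       }],
    #   }
    #
    #   tags_data is expected to be an event-style sequence whose first element
    #   is truthy and whose fourth element is a list of null-padded byte
    #   strings, e.g. (True, org_id_bytes, service_id_bytes, [b"tag-one\x00"]).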

    def process_org_data(self, org_id, org_data, ipfs_data, org_metadata_uri):
        self.repo.auto_commit = False
        conn = self.repo
        try:

            if (org_data is not None and org_data[0]):
                self._create_or_updt_org(
                    org_id=org_id, org_name=ipfs_data["org_name"], owner_address=org_data[3], org_metadata_uri=org_metadata_uri, conn=conn)
                self._del_org_groups(org_id=org_id, conn=conn)
                self._create_org_groups(
                    org_id=org_id, groups=ipfs_data["groups"], conn=conn)
                self._del_members(org_id=org_id, conn=conn)
                self._create_or_updt_members(org_id, org_data[4], conn)
                self._commit(conn)
        except Exception as e:
            self.util_obj.report_slack(
                type=1, slack_msg=repr(e), SLACK_HOOK=SLACK_HOOK)
            self._rollback(conn=conn, err=repr(e))

    def update_tags(self, org_id, service_id, tags_data):
        self.repo.auto_commit = False
        conn = self.repo
        try:
            self._del_tags(org_id=org_id, service_id=service_id, conn=conn)
            if (tags_data is not None and tags_data[0]):
                tags = tags_data[3]
                srvc_data = self._get_srvc_row_id(
                    service_id=service_id, org_id=org_id)
                srvc_rw_id = srvc_data[0]['row_id']
                for tag in tags:
                    tag = tag.decode('utf-8')
                    tag = tag.rstrip("\u0000")
                    self._create_tags(srvc_rw_id=srvc_rw_id, org_id=org_id, service_id=service_id, tag_name=tag,
                                      conn=conn)
                self._commit(conn)
        except Exception as e:
            self.util_obj.report_slack(
                type=1, slack_msg=repr(e), SLACK_HOOK=SLACK_HOOK)
            self._rollback(conn=conn, err=repr(e))

    def _commit(self, conn):
        conn.auto_commit = True
        conn.connection.commit()
        print('_commit')
        print(conn.connection)

    def _rollback(self, conn, err):
        print('_rollback ::error: ', err)
        conn.auto_commit = True
        conn.connection.rollback()
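

# A minimal sketch (not part of the consumer class above) of the manual
# transaction pattern it uses: auto_commit is switched off on the shared
# Repository connection, all writes run on that connection, and the commit /
# rollback steps mirror the _commit and _rollback helpers. `repo` is assumed
# to expose the same `auto_commit` flag and raw `connection` attribute as the
# Repository class used elsewhere in this file.
def example_manual_transaction(repo, work):
    repo.auto_commit = False
    try:
        work(repo)                  # run every write on the same connection
        repo.auto_commit = True
        repo.connection.commit()    # persist all writes atomically
    except Exception:
        repo.auto_commit = True
        repo.connection.rollback()  # discard partial writes on failure
        raise                       # the consumer reports to Slack instead of re-raising
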
class TestUserPreference(TestCase):
    def setUp(self):
        self.repo = Repository(net_id=NETWORK_ID, NETWORKS=NETWORKS)
        username = "******"
        account_id = "123"
        name = "dummy_user_2"
        status = 1
        request_id = "id_123"
        user_one_row_id = 101
        current_time = datetime.utcnow()
        epoch_time = current_time.timestamp()
        self.repo.execute(
            "INSERT INTO user (row_id, username, account_id, name, email, email_verified, status, request_id, "
            "request_time_epoch, row_created, row_updated) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)",
            [
                user_one_row_id, username, account_id, name, username, 1,
                status, request_id, epoch_time, current_time, current_time
            ])
        username = "******"
        account_id = "123"
        name = "dummy_user_2"
        status = 1
        user_two_row_id = 202
        request_id = "id_123"
        self.repo.execute(
            "INSERT INTO user (row_id, username, account_id, name, email, email_verified, status, request_id, "
            "request_time_epoch, row_created, row_updated) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)",
            [
                user_two_row_id, username, account_id, name, username, 1,
                status, request_id, epoch_time, current_time, current_time
            ])
        row_id = 1111
        user_row_id = user_two_row_id
        preference_type = "FEATURE_RELEASE"
        communication_type = "SMS"
        source = "PUBLISHER_DAPP"
        opt_out_reason = None
        status = 0
        self.repo.execute(
            "INSERT INTO user_preference(row_id, user_row_id, preference_type, communication_type, source, "
            "opt_out_reason, status, created_on, updated_on) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s)",
            [
                row_id, user_row_id, preference_type, communication_type,
                source, opt_out_reason, status, current_time, current_time
            ])

    def test_add_or_update_user_preference(self):
        event = {
            "requestContext": {
                "authorizer": {
                    "claims": {
                        "email": "*****@*****.**"
                    }
                }
            },
            "body":
            json.dumps([{
                "preference_type": "FEATURE_RELEASE",
                "communication_type": "EMAIL",
                "source": "PUBLISHER_DAPP",
                "status": True
            }, {
                "preference_type": "WEEKLY_SUMMARY",
                "communication_type": "EMAIL",
                "source": "PUBLISHER_DAPP",
                "status": True
            }])
        }
        response = add_or_update_user_preference(event=event, context=None)
        assert (response["statusCode"] == 200)
        response_body = json.loads(response["body"])
        assert (response_body["status"] == "success")
        assert (response_body["data"] == [
            Status.ENABLED.value, Status.ENABLED.value
        ])
        event = {
            "requestContext": {
                "authorizer": {
                    "claims": {
                        "email": "*****@*****.**"
                    }
                }
            },
            "body":
            json.dumps([{
                "preference_type": "FEATURE_RELEASE",
                "communication_type": "EMAIL",
                "source": "PUBLISHER_DAPP",
                "status": False,
                "opt_out_reason": "Mail too frequent!"
            }])
        }
        response = add_or_update_user_preference(event=event, context=None)
        assert (response["statusCode"] == 200)
        response_body = json.loads(response["body"])
        assert (response_body["status"] == "success")
        assert (response_body["data"] == [Status.DISABLED.value])
        event = {
            "requestContext": {
                "authorizer": {
                    "claims": {
                        "email": "*****@*****.**"
                    }
                }
            },
            "body":
            json.dumps([{
                "preference_type": "FEATURE_RELEASE",
                "communication_type": "EMAIL",
                "source": "PUBLISHER_DAPP",
                "status": True
            }, {
                "preference_type": "WEEKLY_SUMMARY",
                "communication_type": "EMAIL",
                "source": "PUBLISHER_DAPP",
                "status": False,
                "opt_out_reason": "Mail too frequent!"
            }])
        }
        response = add_or_update_user_preference(event=event, context=None)
        assert (response["statusCode"] == 200)
        response_body = json.loads(response["body"])
        assert (response_body["status"] == "success")
        assert (response_body["data"] == [
            Status.ENABLED.value, Status.DISABLED.value
        ])

    def test_get_user_preference(self):
        event = {
            "requestContext": {
                "authorizer": {
                    "claims": {
                        "email": "*****@*****.**"
                    }
                }
            }
        }
        response = get_user_preference(event=event, context=None)
        print(response)
        assert (response["statusCode"] == 200)
        response_body = json.loads(response["body"])
        assert (response_body["status"] == "success")
        assert (
            response_body["data"][0]["preference_type"] == "FEATURE_RELEASE")
        assert (response_body["data"][0]["communication_type"] == "SMS")
        assert (response_body["data"][0]["source"] == "PUBLISHER_DAPP")
        assert (response_body["data"][0]["status"] == 0)

    def tearDown(self):
        self.repo.execute("DELETE FROM user_preference")
        self.repo.execute("DELETE FROM user")