def test_init_update_new_org(self, publish_mock):
    """
    Tests how a new changeset is initialised for a new org (never synced).

    Args:
        publish_mock(Mock): mock of the changeset publish function
    """
    Org(id='test', changeset_started_at=None, changeset_completed_at=None).put()
    sync_utils.init_update('test')
    org = Org.get_by_id('test')

    # changeset has been incremented
    self.assertEqual(org.changeset, 0)

    # and changeset timestamps are set
    self.assertIsNotNone(org.changeset_started_at)
    self.assertIsNone(org.changeset_completed_at)

    # and the update task has been created
    task_count = len(self.taskqueue.get_filtered_tasks())
    self.assertEqual(task_count, 1)

    # and changeset status is published
    publish_mock.assert_called_once_with('test', 0, 'syncing')
def test_init_update_inactive_update_cycle(self, publish_mock):
    """
    Verifies that a new changeset is not created for an org with a sync in
    progress, but that a new update task is still queued because the update
    cycle is inactive (ie. no task currently on adapter-update).

    Args:
        publish_mock(Mock): mock of the changeset publish function
    """
    some_date = datetime.utcnow()
    Org(id='test',
        changeset=10,
        changeset_started_at=some_date,
        changeset_completed_at=None,
        update_cycle_active=False).put()

    sync_utils.init_update('test')
    org = Org.get_by_id('test')

    # changeset has not been changed
    self.assertEqual(org.changeset, 10)

    # and changeset timestamps have not been changed
    self.assertIsNotNone(org.changeset_started_at)
    self.assertIsNone(org.changeset_completed_at)

    # and a new update task has been created because the update_cycle_active was false
    task_count = len(self.taskqueue.get_filtered_tasks())
    self.assertEqual(task_count, 1)

    # and changeset status is published
    publish_mock.assert_called_once_with('test', 10, 'syncing')
def test_init_update_existing_org(self, publish_mock):
    """
    Tests how a new changeset is initialised for an existing org (previously synced).

    Args:
        publish_mock(Mock): mock of the changeset publish function
    """
    some_date = datetime.utcnow()
    Org(id='test',
        changeset=10,
        changeset_started_at=some_date,
        changeset_completed_at=some_date,
        last_update_cycle_completed_at=some_date - timedelta(hours=1)).put()

    sync_utils.init_update('test')
    org = Org.get_by_id('test')

    # changeset has been incremented
    self.assertEqual(org.changeset, 11)

    # and changeset timestamps are set
    self.assertIsNotNone(org.changeset_started_at)
    self.assertIsNone(org.changeset_completed_at)

    # and the update task has been created
    task_count = len(self.taskqueue.get_filtered_tasks())
    self.assertEqual(task_count, 1)

    # and changeset status is published
    publish_mock.assert_called_once_with('test', 11, 'syncing')
def __init__(self, org_uid, provider_config, redirect_url):
    """
    Prepares the org for linking.

    Args:
        org_uid(str): org identifier
        provider_config(ProviderConfig): ndb model holding the provider config for the org
        redirect_url(str): the url to which the linker should send the user to after saving qbo tokens
    """
    org = Org.get_by_id(org_uid) or Org(id=org_uid, provider='qbo', provider_config=provider_config.key)

    # If this is a `relink`, check the org has a provider_config set
    if org.provider_config is None:
        org.provider_config = provider_config.key

    # fixed: dropped the stray trailing quote from the log message template
    msg = "setting org status to linking (status {}) and saving redirect_url ({})"
    logging.info(msg.format(LINKING, redirect_url))

    org.status = LINKING
    org.redirect_url = redirect_url
    org.put()

    callback_uri = client_utils.get_redirect_uri_for(org.provider)

    super(QboAuthorizationSession, self).__init__(
        provider_config.client_id,
        redirect_uri=callback_uri,
        scope=SCOPES,
        state=org_uid
    )
def __init__(self, org_uid, provider_config, redirect_url):
    """
    Prepares the org for linking.

    Args:
        org_uid(str): org identifier
        provider_config(ProviderConfig): ndb model holding the provider config for the org
        redirect_url(str): the url to which the linker should send the user to after saving zuora tokens
    """
    org = Org.get_by_id(org_uid) or Org(id=org_uid, provider='zuora', provider_config=provider_config.key)

    # If this is a `relink`, check the org has a provider_config set
    if org.provider_config is None:
        org.provider_config = provider_config.key

    # fixed: dropped the stray trailing quote from the log message template
    msg = "setting org status to linking (status {}) and saving redirect_url ({})"
    logging.info(msg.format(LINKING, redirect_url))

    org.status = LINKING
    org.redirect_url = redirect_url
    org.put()

    self.org = org
def test_status_endpoint(self):
    """
    A few test cases for the status endpoint.
    """
    link_date = datetime(2010, 1, 1)
    connect_date = datetime(2010, 1, 2)
    Org(id='test1', status=1, changeset=0, linked_at=link_date, connected_at=connect_date).put()
    response = self.app.get('/api/data_sources/test1/status')

    # 200 response
    self.assertEqual(response.status_code, 200)

    # meta contains data_source_id
    self.assertEqual(response.json['meta']['data_source_id'], 'test1')

    # link_status is present
    self.assertEqual(response.json['data'][0]['relationships']['link_status']['data']['id'], 'test1')
    self.assertEqual(response.json['included'][1]['attributes']['status'], 'unlinked')
    self.assertEqual(response.json['included'][1]['attributes']['linked_at'], '2010-01-01T00:00:00')

    # connect_status is present
    self.assertEqual(response.json['data'][0]['relationships']['connection_status']['data']['id'], 'test1')
    self.assertEqual(response.json['included'][0]['attributes']['status'], 'disconnected')
    self.assertEqual(response.json['included'][0]['attributes']['connected_at'], '2010-01-02T00:00:00')

    # last changeset data is present
    # NOTE(review): last_changeset_status is read from data[0] directly here, while
    # link_status/connection_status sit under data[0]['relationships'] — confirm the
    # serializer really emits it at the top level of the resource object
    self.assertEqual(response.json['data'][0]['last_changeset_status']['data']['id'], 'test1_0')
    self.assertEqual(
        response.json['data'][0]['last_changeset_status']['links']['related'],
        '/data_sources/test1/changesets/0/status'
    )

    # test CONNECTED internal status (status=2 maps to 'linked' + 'connected')
    Org(id='test2', status=2, changeset=0, linked_at=link_date, connected_at=connect_date).put()
    response = self.app.get('/api/data_sources/test2/status')
    self.assertEqual(response.json['included'][1]['attributes']['status'], 'linked')
    self.assertEqual(response.json['included'][0]['attributes']['status'], 'connected')

    # test DISCONNECTED internal status (status=3 maps to 'linked' + 'disconnected')
    Org(id='test3', status=3, changeset=0, linked_at=link_date, connected_at=connect_date).put()
    response = self.app.get('/api/data_sources/test3/status')
    self.assertEqual(response.json['included'][1]['attributes']['status'], 'linked')
    self.assertEqual(response.json['included'][0]['attributes']['status'], 'disconnected')

    # ensure changeset -1 is not exposed as last_changeset_status
    Org(id='test4', status=2, changeset=-1, linked_at=link_date, connected_at=connect_date).put()
    response = self.app.get('/api/data_sources/test4/status')
    self.assertFalse('last_changeset_status' in response.json['data'][0])

    # test missing org (still a 200, not a 404)
    response = self.app.get('/api/data_sources/blah/status')
    self.assertEqual(response.status_code, 200)
def test_items_to_tasks(self):
    """
    Verifies that multiple datastore page fetches result in properly emitted items.
    """
    Org(id='test1').put()
    Org(id='test2').put()
    Org(id='test3').put()

    # consume the generator directly instead of a manual append loop
    emitted_orgs = list(datastore_utils.emit_items(Org.query()))

    orgs = Org.query().fetch()
    self.assertListEqual(orgs, emitted_orgs)
def mark_as_disconnected(org_uid, deactivate_update_cycle):
    """
    Flags an org as disconnected by changing its status to DISCONNECTED and completing current
    changeset. This is useful if the sync gives up because of authentication issues with the
    provider for example. This does not forcibly disconnect the org by deleting the auth keys.

    Publishes an error status for changeset currently being ingested.

    Args:
        org_uid(str): org identifier
        deactivate_update_cycle(bool): indicates if the update_cycle_active flag should be set to false
    """
    logging.info("marking the org as disconnected (status value {})".format(DISCONNECTED))

    org = Org.get_by_id(org_uid)
    org.status = DISCONNECTED
    if deactivate_update_cycle:
        org.update_cycle_active = False
    org.put()

    publish_status(org_uid, CONNECT_STATUS_TYPE, CONNECT_STATUS_DISCONNECTED)

    # nothing more to broadcast unless a changeset is mid-flight
    if not is_changeset_in_progress(org):
        return

    logging.info("publishing error changeset status for changeset {}:{}".format(org_uid, org.changeset))
    publish_changeset_status(org_uid, org.changeset, CHANGESET_STATUS_ERROR)
def mark_as_connected(org_uid, also_linked=False):
    """
    Flags an org as connected. The org will get included in update cycles from this point.

    Args:
        org_uid(str): org identifier
        also_linked(bool): if true, the linked_at timestamp is also set and a linked
            status is published in addition to the connected status
    """
    logging.info("marking the org as connected (status value {})".format(CONNECTED))

    org = Org.get_by_id(org_uid)
    org.status = CONNECTED
    if also_linked:
        org.linked_at = datetime.utcnow()
    org.connected_at = datetime.utcnow()
    org.put()

    if also_linked:
        publish_status(org_uid, LINK_STATUS_TYPE, LINK_STATUS_LINKED)
    publish_status(org_uid, CONNECT_STATUS_TYPE, CONNECT_STATUS_CONNECTED)

    if is_changeset_in_progress(org):
        logging.info("publishing syncing changeset status for changeset {}:{}".format(org_uid, org.changeset))
        publish_changeset_status(org_uid, org.changeset, CHANGESET_STATUS_SYNCING)
def __init__(self, org_uid, callback_args):
    """
    Extracts QBO file details and access tokens from the QBO callback.

    Args:
        org_uid(str): org identifier
        callback_args(dict): request parameters sent by qbo

    Raises:
        AuthCancelled: if the user denied access on the qbo consent screen
        MismatchingFileConnectionAttempt: if the callback is for a different qbo file (realm)
    """
    self.org_uid = org_uid
    self.callback_args = callback_args
    self.org = Org.get_by_id(org_uid)

    if callback_args.get('error') == 'access_denied':
        raise AuthCancelled(self.org)

    entity_id = callback_args.get('realmId')

    # an org that already has an entity_id may only ever reconnect to that same file
    if self.org.entity_id and self.org.entity_id != entity_id:
        raise MismatchingFileConnectionAttempt(self.org)

    logging.info("saving entity_id '{}' for org '{}'".format(entity_id, org_uid))
    self.org.entity_id = entity_id
    self.org.put()

    super(QboTokenSession, self).__init__(
        redirect_uri=client_utils.get_redirect_uri_for(self.org.provider),
    )
def __init__(self, org_uid, callback_args):
    """
    Third step of the Oauth1 flow. Processing the callback from Xero and using the callback
    params for fetching the access token.

    Args:
        org_uid(str): org identifier
        callback_args(dict): request parameters sent by Xero
    """
    self.org_uid = org_uid
    self.callback_args = callback_args
    self.org = Org.get_by_id(org_uid)
    self.provider = self.org.provider_config.get()

    rsa_key, rsa_method = _get_partner_session_attrs(self.provider)

    # the request token saved during step one of the flow supplies the secret needed
    # to exchange the verifier for an access token
    request_token = OrgCredentials.get_by_id(self.org_uid, parent=self.org.key).token

    super(XeroTokenSession, self).__init__(
        self.provider.client_id,
        client_secret=self.provider.client_secret,
        resource_owner_key=callback_args['oauth_token'],
        resource_owner_secret=request_token.get('oauth_token_secret'),
        verifier=callback_args['oauth_verifier'],
        rsa_key=rsa_key,
        signature_method=rsa_method
    )
def test_oauth1(self, init_update_mock, save_token_mock, connected_mock, publish_mock):
    """
    Tests oauth1 token session flow

    Args:
        init_update_mock(Mock): mock of the initial sync kick-off
        save_token_mock(Mock): mock of the token save call
        connected_mock(Mock): mock of the mark-as-connected call
        publish_mock(Mock): mock of the status publish function
    """
    org = Org(
        id='test',
        redirect_url="http://app",
        provider_config=self.provider_configs['xerov2']).put()
    OrgCredentials(
        id='test',
        parent=org,
        token={'expires_at': 0, 'oauth_token': 'blah', 'oauth_token_secret': 'doggo'}).put()

    response = self.app.get('/linker/test/oauth?oauth_verifier=123&oauth_token=blah')
    self.assertEqual(response.status_code, 302)

    # fixed comment: the user is redirected back to the app (not the provider)
    self.assertEqual(response.location, "http://app?data_source_name=ACUIT")

    # token is saved
    save_token_mock.assert_called_once()

    # and then org is connected (this publishes status as connected as well)
    connected_mock.assert_called_once()

    # and the initial sync has been kicked off
    init_update_mock.assert_called_once()
def test_mismatching_xero_file(self, fetch_token_mock, connected_mock, disconnected_mock):
    """
    Verifies that a XeroOrg cannot be reconnected to a different file than its own.
    (i.e via mismatching ShortCodes)

    Args:
        fetch_token_mock (MagicMock): fetch_access_token mock
    """
    token = {'expires_at': 117, 'oauth_token': 'blah', 'oauth_token_secret': 'secret'}
    fetch_token_mock.return_value = token

    # org is already bound to a specific xero file via entity_id
    org_key = Org(
        id='test',
        redirect_url="http://app?app_state=blah",
        entity_id='vlg_pls_stop',
        provider_config=self.provider_configs['xerov2']).put()
    OrgCredentials(id='test', parent=org_key, token=token).put()

    response = self.app.get('/linker/test/oauth?oauth_verifier=123&oauth_token=blah')

    # user is redirected to the app with an error message
    self.assertEqual(response.status_code, 302)
    self.assertEqual(response.location, "http://app?app_state=blah&error_code=source_mismatch")

    # the org must not be connected, and is flagged as disconnected instead
    connected_mock.assert_not_called()
    disconnected_mock.assert_called_once()
def test_changeset_status_error(self, client_mock):
    """
    Verifies the message published on pubsub when a changeset is in error.

    Args:
        client_mock(Mock): mock of the pubsub client
    """
    Org(id='test').put()

    publish_changeset_status('test', 2, 'error')

    # an errored changeset has no synced_at timestamp
    expected_payload = json.dumps({
        "meta": {
            "version": "2.0.0",
            "data_source_id": "test",
            "timestamp": "2010-01-01T00:00:00"
        },
        "data": [{
            "type": "changeset_sync_status",
            "id": "test_2",
            "attributes": {
                "status": "error",
                "changeset": 2,
                "synced_at": None
            }
        }]
    })

    publish_mock = client_mock.return_value.topic.return_value.publish
    publish_mock.assert_called_with(expected_payload)
def test_non_ascii_response(self, request_mock):
    """
    Ensures that the client can handle non-ascii response body (can break due to logging
    for example).

    Args:
        request_mock(Mock): a mock of the response
    """
    # setup an org
    org = Org(id='test', provider_config=self.test_provider_config).put()
    OrgCredentials(id='test', parent=org, token={'expires_at': 0, 'refresh_token': 'refresh'}).put()

    # non-200 and non-ascii response: raises ValueError for the bad status, but the
    # non-ascii body must not cause an encoding error while the failure is handled/logged
    # (fixed comment: the original said "there should be no exception" above an assertRaises)
    request_mock.return_value = Mock(status_code=400, text=u'te\xa0st')
    session = QboApiSession('test')
    with self.assertRaises(ValueError):
        session.request('GET', 'http://testurl.com')

    # 200 and non-ascii response: there should be no exception
    request_mock.return_value = Mock(status_code=200, text=u'{"value": "te\xa0st"}')
    session = QboApiSession('test')
    self.assertDictEqual(session.request('GET', 'http://testurl.com'), {"value": u"te\xa0st"})
def test_is_authenticated(self, get_mock):
    """
    Tests the QBO client's on-demand auth check.

    Args:
        get_mock(Mock): mock of the qbo api get call
    """
    org = Org(id='test', provider_config=self.test_provider_config).put()
    OrgCredentials(id='test', parent=org, token={'expires_at': 0, 'refresh_token': 'refresh'}).put()

    # getting CompanyInfo means authenticated
    get_mock.return_value = {'CompanyInfo': {'CompanyName': 'jaja'}}
    qbo_session = QboApiSession('test')
    self.assertTrue(qbo_session.is_authenticated())

    # no CompanyInfo means not authenticated
    get_mock.return_value = {'NotCompanyInfo': {}}
    self.assertFalse(qbo_session.is_authenticated())

    # an exception means not authenticated
    get_mock.side_effect = UnauthorizedApiCallException()
    self.assertFalse(qbo_session.is_authenticated())
def reset_endpoints():
    """
    Handler which creates reset endpoint tasks for selected endpoints/orgs.
    """
    endpoint_indexes = request.form.getlist('endpoint_index')
    org_uid = request.form.get('org_uid')

    # nothing selected - send the user back to the selection form
    if not endpoint_indexes:
        flash("At least one endpoint is required")
        return render_template('select_endpoints.html', endpoints=ENDPOINTS, org_uid=org_uid), 200

    # a specific org was given - queue a single reset task for it
    if org_uid:
        taskqueue.add(
            target='admin',
            url='/admin/reset_endpoints_task/{}'.format(org_uid),
            params={'endpoint_index': endpoint_indexes})
        flash("Kicked off reset of {} endpoints for {}".format(len(endpoint_indexes), org_uid))
        return redirect(prefix('/'))

    # no org given - fan out one reset task per connected org
    count = query_to_tasks(
        query=Org.query(Org.status == CONNECTED),
        queue=Queue('admin'),
        task_generator=lambda key: Task(
            url='/admin/reset_endpoints_task/{}'.format(key.string_id()),
            params={'endpoint_index': endpoint_indexes}))
    flash("Kicked off reset of {} endpoints for {} orgs".format(len(endpoint_indexes), count))
    return redirect(prefix('/commands'))
def test_request(self, request_mock):
    """
    Tests Xero API request error handling.

    Args:
        request_mock(Mock): mock of the xero api call response
    """
    org = Org(id='test', provider_config=self.test_provider_config).put()
    OrgCredentials(
        id='test',
        parent=org,
        token={'expires_at': 0, 'oauth_token': 'token', 'oauth_token_secret': 'secret'}).put()
    session = XeroApiSession('test')

    # successful response data comes through
    request_mock.return_value = Mock(status_code=200, text='{"key": "value"}')
    self.assertEqual(session.get("https://xero"), {"key": "value"})

    # 401 response raises a custom exception
    request_mock.return_value = Mock(status_code=401)
    with self.assertRaises(UnauthorizedApiCallException):
        session.get("https://xero")

    # 403 raises its own custom exception
    request_mock.return_value = Mock(status_code=403)
    with self.assertRaises(ForbiddenApiCallException):
        session.get('https://xero')

    # non 200 and non 401 response raises an exception
    request_mock.return_value = Mock(status_code=500)
    with self.assertRaises(ValueError):
        session.get("https://xero")
def reset_endpoints_task(org_uid):
    """
    Processes org reset task from the task queue (clears endpoint state to cause the next
    sync to fetch all the data, and creates a task on the update queue to kick off the
    sync cycle for the org).

    Args:
        org_uid(str): org identifier

    Returns:
        (str, int): empty body with 423 if the org is mid-sync (task will be retried),
            204 otherwise
    """
    org = Org.get_by_id(org_uid)

    # resetting markers while a changeset is being ingested would corrupt sync state
    if (org.changeset_started_at and not org.changeset_completed_at) or org.update_cycle_active:
        logging.info("org syncing at the moment, will try again later")
        return '', 423

    endpoint_indexes = request.form.getlist('endpoint_index')
    logging.info("resetting markers for org {} and endpoints {}".format(org_uid, endpoint_indexes))

    # TODO: this is a hack, this should be delegated to a qbo class, instantiated via a factory from the org provider
    sync_data = QboSyncData.get_by_id(org_uid)

    if not sync_data:
        logging.warning("could not find sync data")
        return '', 204

    for endpoint_index in [int(_index) for _index in endpoint_indexes]:
        sync_data.markers[endpoint_index] = START_OF_TIME

    sync_data.put()
    sync_utils.init_update(org_uid)

    return '', 204
def test_changeset_status_published(self, client_mock):
    """
    Verifies the message published on pubsub when a changeset is synced.

    Args:
        client_mock(Mock): mock of the pubsub client
    """
    Org(id='test').put()
    OrgChangeset(org_uid='test', changeset=2, publish_finished_at=datetime(2010, 1, 2)).put()

    publish_changeset_status('test', 2, 'synced')

    # synced_at comes from the changeset's publish_finished_at timestamp
    expected_payload = json.dumps({
        "meta": {
            "version": "2.0.0",
            "data_source_id": "test",
            "timestamp": "2010-01-01T00:00:00"
        },
        "data": [{
            "type": "changeset_sync_status",
            "id": "test_2",
            "attributes": {
                "status": "synced",
                "changeset": 2,
                "synced_at": "2010-01-02T00:00:00"
            }
        }]
    })

    publish_mock = client_mock.return_value.topic.return_value.publish
    publish_mock.assert_called_with(expected_payload)
def test_status_publish_connected(self, client_mock):
    """
    Verifies the message published on pubsub when an org is connected.

    Args:
        client_mock(Mock): mock of the pubsub client
    """
    Org(id='test', connected_at=datetime(2010, 1, 2)).put()

    publish_status('test', 'connection_status', 'connected')

    expected_payload = json.dumps({
        "meta": {
            "version": "2.0.0",
            "data_source_id": "test",
            "timestamp": "2010-01-01T00:00:00"
        },
        "data": [{
            "type": "connection_status",
            "id": "test",
            "attributes": {
                "status": "connected",
                "connected_at": "2010-01-02T00:00:00"
            }
        }]
    })

    publish_mock = client_mock.return_value.topic.return_value.publish
    publish_mock.assert_called_with(expected_payload)
def test_query_to_tasks(self):
    """
    Verifies that multiple pages of tasks get queued up properly.
    """
    # two connected orgs and one disconnected - only the connected ones should fan out
    for uid, status in (('test1', CONNECTED), ('test2', CONNECTED), ('test3', DISCONNECTED)):
        Org(id=uid, status=status).put()

    count = task_utils.query_to_tasks(
        query=Org.query(Org.status == CONNECTED),
        queue=Queue('adapter-update'),
        task_generator=lambda key: Task(url='/something/{}'.format(key.string_id())))

    self.assertEqual(count, 2)
    self.assertEqual(len(self.taskqueue.get_filtered_tasks()), 2)
def test_mark_disconnected_deactivate(self, publish_status_mock):
    """
    Verifies that an org can be marked as disconnected with flagging of update cycle as inactive.

    Args:
        publish_status_mock(Mock): pubsub publish function mock
    """
    # fixed: start with an active update cycle, otherwise the assertion below could not
    # detect a no-op deactivation (the flag was already False before the call)
    Org(id='test', status=CONNECTED, update_cycle_active=True).put()

    sync_utils.mark_as_disconnected(org_uid='test', deactivate_update_cycle=True)

    # status should be changed and new status broadcast on pubsub
    org = Org.get_by_id('test')
    self.assertEqual(org.status, DISCONNECTED)
    self.assertEqual(org.update_cycle_active, False)
    publish_status_mock.assert_called_with('test', 'connection_status', 'disconnected')
def basic_auth(provider, org_uid, username, password):
    """
    Handles basic username/password auth flow.
    Users credentials (username/password) are stored in the UserCredentials kind

    TODO: This should be temporary! and only implemented in DEV until vault is integrated

    Args:
        provider(str): The provider
        org_uid(str): The org ID
        username(str): The username
        password(str): The password

    Returns:
        (str): Response text
    """
    # If authenticating for Zuora, get a session cookie and store in OrgCredentials
    if provider == 'zuora':
        # Multi-entity may be enabled, we need to specify it as a header when authenticating
        # TODO: Fix this to work with multiple entities once its figured out how it works.
        session = client_factory.get_token_session(provider, org_uid, username, password)
        try:
            session.get_and_save_token()
        except UnauthorizedApiCallException:
            # fixed: message previously called .format(provider) with no placeholder
            logging.info("got an error - Invalid Credentials ({})".format(provider))
            _abort_link(org_uid)
            return _respond(Org.get_by_id(org_uid), {'error_code': 'invalid_credentials'}, 'not okidoki')

    mark_as_connected(org_uid=org_uid, also_linked=True)

    try:
        data_source_name = client_factory.get_api_session(provider, org_uid).get_company_name()
    except FailedToGetCompanyName:
        # TODO: this should be sent to the client as an error code rather than an empty name
        data_source_name = None

    init_update(org_uid)

    return _respond(Org.get_by_id(org_uid), {'data_source_name': data_source_name}, 'okidoki')
def test_basic_auth_creds_provided_by_apigee(self, init_update_mock, save_token_mock, connected_mock, publish_mock):
    """
    Verifies that linking works correctly when user credentials are supplied from apigee
    via the connect endpoint

    Args:
        init_update_mock(Mock): mock of the initial sync kick-off
        save_token_mock(Mock): mock of the token save call
        connected_mock(Mock): mock of the mark-as-connected call
        publish_mock(Mock): mock of the status publish function
    """
    org = Org(
        provider='zuora',
        id='test',
        redirect_url="http://app",
        provider_config=self.provider_configs['zuora']).put()
    OrgCredentials(id='test', parent=org, token={'expires_at': 0, 'access_token': 'blah'}).put()

    response = self.app.post(
        '/linker/zuora/test/connect?redirect_url=http://app&app_family=local_host_family',
        json={'username': '******', 'password': '******'},
        content_type='application/json')
    self.assertEqual(response.status_code, 302)

    # fixed comment: the user is redirected back to the app (not the provider)
    self.assertEqual(response.location, 'http://app?data_source_name=ACUIT')

    # org is linking and status is published on pubsub
    self.assertEqual(Org.get_by_id('test').status, LINKING)

    # app redirect url is saved
    self.assertEqual(Org.get_by_id('test').redirect_url, "http://app")

    # token is saved
    save_token_mock.assert_called_once()

    # and then org is connected (this publishes status as connected as well)
    connected_mock.assert_called_once()

    # and the initial sync has been kicked off
    init_update_mock.assert_called_once()
def test_connect(self, publish_mock):
    """
    Tests the first step of the oauth flow authorisation.

    Args:
        publish_mock(Mock): mock of the status publish function
    """
    response = self.app.post(
        '/linker/qbo/test/connect?redirect_url=http://app&app_family=local_host_family')

    # the user is redirected to the provider
    self.assertEqual(response.status_code, 302)
    self.assertEqual(response.location, "http://qbo")

    org = Org.get_by_id('test')

    # org is linking and status is published on pubsub
    self.assertEqual(org.status, LINKING)

    # app redirect url is saved
    self.assertEqual(org.redirect_url, "http://app")
def perform_disconnect(org_uid):
    """
    Explicitly disconnects an org: publishes an unlinked status and marks the org as
    disconnected (without deactivating the update cycle).

    Args:
        org_uid(str): org identifier

    Raises:
        NotFoundException: if no org exists for the given identifier
    """
    logging.info("disconnecting the org explicitly")
    org = Org.get_by_id(org_uid)

    if not org:
        logging.info("org {} not found".format(org_uid))
        raise NotFoundException("org {} not found".format(org_uid))

    publish_status(org_uid, LINK_STATUS_TYPE, LINK_STATUS_UNLINKED)
    mark_as_disconnected(org_uid=org_uid, deactivate_update_cycle=False)
def __init__(self, org_uid):
    """
    Loads the org and its qbo sync state for the given org identifier.

    Args:
        org_uid(str): org identifier
    """
    self.org_uid = org_uid
    self.org = Org.get_by_id(org_uid)
    self.sync_data = QboSyncData.get_by_id(org_uid)
    self.entity_id = self.org.entity_id
def test_complete_later_changeset(self):
    """
    Check that no new task is queued up after a non-initial sync.
    """
    Org(id='test', changeset=1, changeset_started_at=datetime.utcnow()).put()

    sync_utils.complete_changeset('test')

    # No task queued
    self.assertEqual(len(self.taskqueue.get_filtered_tasks()), 0)
def __init__(self, org_uid, provider_config, redirect_url):
    """
    Prepares the org for linking.

    Args:
        org_uid(str): org identifier
        provider_config(ProviderConfig): ndb model holding the provider config for the org
        redirect_url(str): the url to which the linker should send the user to after saving xero tokens
    """
    org = Org.get_by_id(org_uid) or Org(id=org_uid, provider='xerov2', provider_config=provider_config.key)

    # If this is a `relink`, check the org has a provider_config set
    if org.provider_config is None:
        org.provider_config = provider_config.key

    # fixed: the previous version logged provider_config.client_secret - secrets must
    # never end up in logs; only the (non-sensitive) client id is logged now
    logging.info("Provider id {}".format(provider_config.client_id))

    # fixed: dropped the stray trailing quote from the log message template
    msg = "setting org status to linking (status {}) and saving redirect_url ({})"
    logging.info(msg.format(LINKING, redirect_url))

    org.status = LINKING
    org.redirect_url = redirect_url
    org.put()

    rsa_key, rsa_method = _get_partner_session_attrs(provider_config)
    callback_uri = client_utils.get_redirect_uri_for(org.provider, org_uid)
    self.org_uid = org_uid

    super(XeroAuthorizationSession, self).__init__(
        client_key=provider_config.client_id,
        client_secret=provider_config.client_secret,
        callback_uri=callback_uri,
        rsa_key=rsa_key,
        signature_method=rsa_method
    )