def total_users(self, s_date, e_date):
    """Return a cumulative daily user count over [s_date, e_date].

    Produces one entry per day, oldest first, each of the form
    ``[cumulative_total, 'YYYY-MM-DD']``.  Results are memoized in
    memcache for 180 seconds under a key derived from both dates.

    Args:
        s_date: datetime, start of the reporting window (inclusive).
        e_date: datetime, end of the reporting window (inclusive).

    Returns:
        list of [int, str] pairs, oldest day first.
    """
    key = ('total_users_' + s_date.strftime('%Y-%m-%d')
           + e_date.strftime('%Y-%m-%d'))
    cached = memcache.get(key)
    if cached:
        return cached

    # 470 users apparently pre-date the User table -- TODO confirm offset.
    total_users = User.count() + 470
    days = (e_date - s_date).days + 1

    # One zero-initialized signup bucket per day in the window.
    interval = {}
    for offset in range(days):
        interval[(e_date - timedelta(days=offset)).strftime('%Y-%m-%d')] = 0

    # NOTE(review): the early `break` assumes User.all() yields users in
    # descending created_at order -- verify against the data source.
    for user in User.all():
        if s_date > user.created_at:
            break
        if user.created_at > e_date:
            # Signups after the window are folded into the final day.
            interval[e_date.strftime('%Y-%m-%d')] += 1
            continue
        interval[user.created_at.strftime('%Y-%m-%d')] += 1

    # Walk backwards from e_date, subtracting each day's signups to get
    # the cumulative total as of that day.  Direct indexing replaces the
    # original interval.get(): every key was pre-seeded above, and a
    # missing key should fail loudly rather than yield None += 1.
    result = []
    signups_since = 0
    for offset in range(days):
        date_str = (e_date - timedelta(days=offset)).strftime('%Y-%m-%d')
        signups_since += interval[date_str]
        result.append([total_users - signups_since, date_str])
    result.reverse()

    memcache.add(key, result, 180)
    return result
def total_users(self, s_date, e_date):
    """Return the cumulative user count for each day in [s_date, e_date].

    Each element is ``[cumulative_total, 'YYYY-MM-DD']``; the list is
    ordered oldest day first.  The computed series is cached in memcache
    for 180 seconds, keyed on both window endpoints.

    Args:
        s_date: datetime, inclusive start of the window.
        e_date: datetime, inclusive end of the window.

    Returns:
        list of [int, str] pairs, oldest day first.
    """
    key = 'total_users_{}{}'.format(s_date.strftime('%Y-%m-%d'),
                                    e_date.strftime('%Y-%m-%d'))
    cached = memcache.get(key)
    if cached:
        return cached

    # Offset of 470 accounts that predate the User table -- TODO confirm.
    total_users = User.count() + 470
    days = (e_date - s_date).days + 1
    day_strs = [(e_date - timedelta(days=offset)).strftime('%Y-%m-%d')
                for offset in range(days)]

    # Per-day signup counters, all seeded to zero.
    interval = dict.fromkeys(day_strs, 0)

    # NOTE(review): the `break` relies on User.all() iterating users in
    # descending created_at order -- verify with the backing store.
    for user in User.all():
        if s_date > user.created_at:
            break
        if user.created_at > e_date:
            # Signups newer than the window count toward the last day.
            interval[e_date.strftime('%Y-%m-%d')] += 1
            continue
        interval[user.created_at.strftime('%Y-%m-%d')] += 1

    # Accumulate signups backwards from e_date; each day's total is the
    # grand total minus everything signed up after that day.  Direct
    # indexing replaces interval.get(): keys are guaranteed present and
    # a miss should raise instead of producing None += 1.
    result = []
    running = 0
    for date_str in day_strs:
        running += interval[date_str]
        result.append([total_users - running, date_str])
    result.reverse()

    memcache.add(key, result, 180)
    return result
def it_keeps_iterating_if_next_link(self):
    """Iteration follows the `next` link across pages until it is absent."""
    first_page = page_of_users(include_next_link=True)
    last_page = page_of_users(include_next_link=False)
    with patch.object(Intercom, 'get', side_effect=[first_page, last_page]) as mock_method:  # noqa
        emails = [user.email for user in User.all()]
        expected_calls = [
            call('/users'),
            call('https://api.intercom.io/users?per_page=50&page=2'),  # noqa
        ]
        eq_(expected_calls, mock_method.mock_calls)
        eq_(emails,
            ['*****@*****.**', '*****@*****.**', '*****@*****.**'] * 2)  # noqa
def it_keeps_iterating_if_next_link(self):
    """Two pages are fetched when the first response carries a next link."""
    pages = [
        page_of_users(include_next_link=True),
        page_of_users(include_next_link=False),
    ]
    with patch.object(Intercom, "get", side_effect=pages) as mock_method:  # noqa
        emails = [user.email for user in User.all()]
        eq_(
            [call("/users"), call("https://api.intercom.io/users?per_page=50&page=2")],  # noqa
            mock_method.mock_calls,
        )
        eq_(emails, ["*****@*****.**", "*****@*****.**", "*****@*****.**"] * 2)  # noqa
def fetch_all_users(context):
    """Sync every Intercom user to Rakam as a batch of set_properties ops.

    Builds one event dict per user (merged with the user's location
    attributes) and POSTs the whole batch to Rakam in a single request.

    Args:
        context: dict supplying 'rakam_api_url' and 'rakam_write_key'.
    """
    data = []
    for user in User.all():
        event = {
            'picture_url': user.avatar.image_url,
            'companies': user.companies,
            'created_at': user.signed_up_at.isoformat("T"),
            'intercom_id': user.id,
            'session_count': user.session_count,
            # List comprehensions instead of map(): on Python 3 map()
            # returns an iterator that json.dumps cannot serialize.
            'tags': [tag.id for tag in user.tags],
            'segments': [segment.id for segment in user.segments],
            'unsubscribed_from_emails': user.unsubscribed_from_emails,
            'updated_at': user.updated_at.isoformat("T"),
            '_user_agent': user.user_agent_data,
        }
        event.update(user.location_data.attributes)
        data.append({
            # Prefer the external user_id, then email, then Intercom's id.
            'id': user.user_id or user.email or user.id,
            'set_properties': event,
        })
        # A per-user events fetch against the Intercom /events endpoint
        # used to live here behind `if False:`; it was dead code (its
        # inner loop had an empty body) and has been removed.

    response = requests.post(
        context.get('rakam_api_url') + "/user/batch_operations",
        json.dumps({
            'api': {
                'api_key': context.get('rakam_write_key'),
                'library': {
                    'name': 'rakam-task-intercom',
                    'version': '0.1',
                },
            },
            'data': data,
        }))
    if response.status_code != 200:
        print('[{}] Invalid status code from Rakam {} with response {}'.format(
            'user', response.status_code, response.text))
    else:
        print("{} users are updated".format(len(data)))
def test_iterate(self):
    """Every user yielded by User.all() has a populated id."""
    for user in User.all():
        # assertIsNotNone reports the offending value on failure,
        # unlike assertTrue(user.id is not None) which just says False.
        self.assertIsNotNone(user.id)
def it_supports_indexed_array_access(self):
    """The collection returned by User.all() can be indexed like a list."""
    body = page_of_users(include_next_link=False)
    with patch.object(Intercom, "get", return_value=body) as mock_method:
        first_user = User.all()[0]
        eq_(first_user.email, "*****@*****.**")
        mock_method.assert_called_once_with("/users")
def it_stops_iterating_if_no_next_link(self):
    """Without a next link, exactly one request is made for one page."""
    body = page_of_users(include_next_link=False)
    with patch.object(Intercom, "get", return_value=body) as mock_method:
        emails = []
        for user in User.all():
            emails.append(user.email)
        mock_method.assert_called_once_with("/users")
        eq_(emails, ["*****@*****.**", "*****@*****.**", "*****@*****.**"])  # noqa
def test_users():
    """Listing users against the v1 fixture yields at least one user."""
    httpretty.register_uri(get, r(r"/v1/users"), body=fixture('v1-users'))
    users = User.all()
    ok_(len(users) > 0)
def it_supports_indexed_array_access(self):
    """Indexing into User.all() fetches the page and returns that user."""
    single_page = page_of_users(include_next_link=False)
    with patch.object(Intercom, 'get', return_value=single_page) as mock_method:
        eq_(User.all()[0].email, '*****@*****.**')
        mock_method.assert_called_once_with('/users')
def it_stops_iterating_if_no_next_link(self):
    """A single page without a next link triggers exactly one GET."""
    single_page = page_of_users(include_next_link=False)
    with patch.object(Intercom, 'get', return_value=single_page) as mock_method:
        collected = [user.email for user in User.all()]
        mock_method.assert_called_once_with('/users')
        expected = ['*****@*****.**', '*****@*****.**', '*****@*****.**']  # noqa
        eq_(collected, expected)
def it_returns_a_collectionproxy_for_all_without_making_any_requests(self):
    """User.all() is lazy: no request is sent until the proxy is consumed."""
    # A NonCallableMock raises if the request layer is ever invoked,
    # so merely building the proxy must not touch it.
    patcher = mock.patch('intercom.Request.send_request_to_path',
                         new_callable=mock.NonCallableMock)  # noqa
    with patcher:
        proxy = User.all()
        self.assertIsInstance(proxy, CollectionProxy)
from intercom import Intercom
from intercom import User

# SECURITY: these credentials are hard-coded; move them to environment
# variables or a config file before committing/sharing this script.
Intercom.app_id = 'uqg4fg4p'
Intercom.api_key = 'd314ecea83ae73e9e7a5605f3321dd7156bc08bd'

# Print every user's email address.  print() with a single argument is
# valid on both Python 2 and Python 3, unlike the old statement form.
for user in User.all():
    print(user.email)