def setUp(self): """Set up test fixtures.""" # Create a WSGI Application app = webapp2.WSGIApplication(matchmaking.routes) # Wrap app in WebTest's TestApp self.testapp = webtest.TestApp(app) # First, create an instance of the Testbed class. self.testbed = testbed.Testbed() # Then activate the testbed, which prepares the service stubs for use. self.testbed.activate() # Next, declare which service stubs you want to use. self.testbed.init_datastore_v3_stub() self.testbed.init_memcache_stub() # Clear ndb's in-context cache between tests. # This prevents data from leaking between tests. # Alternatively, you could disable caching by # using ndb.get_context().set_cache_policy(False) ndb.get_context().clear_cache() self.lobby_names = ["glau", "julian", "patrick", "rchen93", "xtrina", "kailin"] self.started = ["grace", "neerav"] util_test.create_lobbies(self.lobby_names) util_test.create_lobbies(self.started, True) self.full = ["kailin"] util_test.fill_lobbies(self.full)
def setUp(self):
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.testbed.init_datastore_v3_stub()
    self.testbed.init_memcache_stub()
    ndb.get_context().clear_cache()  # Prevent data from leaking between tests

    self.event = Event(
        id="2010sc",
        name="Palmetto Regional",
        event_type_enum=EventType.REGIONAL,
        short_name="Palmetto",
        event_short="sc",
        year=2010,
        end_date=datetime.datetime(2010, 03, 27),
        official=True,
        location="Clemson, SC",
        start_date=datetime.datetime(2010, 03, 24),
    )
    self.event.put()

    self.match = Match(
        id="2010sc_qm1",
        alliances_json="""{"blue": {"score": -1, "teams": ["frc3464", "frc20", "frc1073"]}, "red": {"score": -1, "teams": ["frc69", "frc571", "frc176"]}}""",
        comp_level="qm",
        event=self.event.key,
        year=2010,
        set_number=1,
        match_number=1,
        team_key_names=[u"frc69", u"frc571", u"frc176", u"frc3464", u"frc20", u"frc1073"],
    )
    self.match.put()
def test_delete_db_ndb_mixed(self):
    # Start empty
    storage_ndb = StorageByKeyName(
        CredentialsNDBModel, 'foo', 'credentials')
    storage = StorageByKeyName(
        CredentialsModel, 'foo', 'credentials')

    # First DB, then NDB
    self.assertEqual(None, storage.get())
    storage.put(self.credentials)
    self.assertNotEqual(None, storage.get())
    storage_ndb.delete()
    self.assertEqual(None, storage.get())

    # First NDB, then DB
    self.assertEqual(None, storage_ndb.get())
    storage_ndb.put(self.credentials)
    storage.delete()
    self.assertNotEqual(None, storage_ndb.get())
    # NDB uses memcache and an instance cache (Context)
    ndb.get_context().clear_cache()
    memcache.flush_all()
    self.assertEqual(None, storage_ndb.get())
def post(self):
    request_data = json.loads(self.request.body)
    logging.info(request_data)
    player = current_user_player()

    # VALIDATION
    if not validate_request_data(self.response, request_data, ['team_name']):
        return
    elif not validate_logged_inn(self.response):
        return
    elif not _validate_has_no_team(self.response, player):
        return

    # REGISTER TEAM
    new_team = Team(
        name=request_data['team_name'],
        owner=player.key
    ).put().get()

    # JOIN TEAM WITH PLAYER
    player.team = new_team.key
    player.put()

    ndb.get_context().clear_cache()  # Required to get the new player as part of the get_data

    set_json_response(self.response, {'team': new_team.get_data('full')})
def setUp(self):
    self.api = TicTacToeApi()

    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.testbed.init_datastore_v3_stub()
    self.testbed.init_memcache_stub()
    ndb.get_context().clear_cache()
    # root_path must be set to the location of queue.yaml.
    # Otherwise, only the 'default' queue will be available.
    self.testbed.init_taskqueue_stub()
    # root_path=os.path.join(os.path.dirname(__file__), 'resources'))
    self.taskqueue_stub = self.testbed.get_stub(
        testbed.TASKQUEUE_SERVICE_NAME)

    self.user = User(name="lisa", email="abc@xyz", wins=0, total=0, rate=0)
    self.user.put()

    self.gameToAdd = Game(
        # target=1,
        # attempts_allowed=1,
        # attempts_remaining=1,
        game_over=False,
        user=self.user.key,
        board="--O-X----"
    )
    self.gameToAdd.put()
def clearExistingDB():
    allwomqueries = WOM.query().fetch(9999, keys_only=True)
    allwomqueries_ENG = WOM_ENG.query().fetch(9999, keys_only=True)
    # To build the keys argument for the '_multi' functions, use the keys_only
    # option so that keys, not entities, are returned.
    ndb.delete_multi(allwomqueries)
    ndb.delete_multi(allwomqueries_ENG)
    ndb.get_context().clear_cache()
def setUp(self):
    app = webapp2.WSGIApplication([webapp2.Route(r'/<year:>', ApiEventListController, methods=['GET'])], debug=True)
    self.testapp = webtest.TestApp(app)

    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.testbed.init_datastore_v3_stub()
    self.testbed.init_urlfetch_stub()
    self.testbed.init_memcache_stub()
    ndb.get_context().clear_cache()  # Prevent data from leaking between tests
    self.testbed.init_taskqueue_stub(root_path=".")

    self.event = Event(
        id="2010sc",
        name="Palmetto Regional",
        event_type_enum=EventType.REGIONAL,
        short_name="Palmetto",
        event_short="sc",
        year=2010,
        end_date=datetime(2010, 03, 27),
        official=True,
        city="Clemson",
        state_prov="SC",
        country="USA",
        start_date=datetime(2010, 03, 24),
    )
    self.event.put()
def setUp(self):
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.testbed.init_datastore_v3_stub()
    self.testbed.init_memcache_stub()
    self.testbed.init_user_stub()
    ndb.get_context().clear_cache()  # Prevent data from leaking between tests

    app = webapp2.WSGIApplication([
        RedirectRoute(r'/request/apiwrite', SuggestApiWriteController, 'request-apiwrite', strict_slash=True),
    ], debug=True)
    self.testapp = webtest.TestApp(app)

    self.event = Event(
        id="2016necmp",
        name="New England District Championship",
        event_type_enum=EventType.OFFSEASON,
        event_district_enum=DistrictType.NEW_ENGLAND,
        short_name="New England",
        event_short="necmp",
        year=2016,
        end_date=datetime(2016, 03, 27),
        official=False,
        city='Hartford',
        state_prov='CT',
        country='USA',
        venue="Some Venue",
        venue_address="Some Venue, Hartford, CT, USA",
        timezone_id="America/New_York",
        start_date=datetime(2016, 03, 24),
        webcast_json="[{\"type\": \"twitch\", \"channel\": \"frcgamesense\"}]",
        website="http://www.firstsv.org",
    )
    self.event.put()
def setUp(self): """Setup the testbed for each test class.""" # First, create an instance of the Testbed class. self.testbed = testbed.Testbed() # Then activate the testbed, which prepares the service stubs for use. self.testbed.activate() # Next, declare which service stubs you want to use. self.testbed.init_datastore_v3_stub() self.testbed.init_memcache_stub() # Clear ndb's in-context cache between tests. # This prevents data from leaking between tests. # Alternatively, you could disable caching by # using ndb.get_context().set_cache_policy(False) ndb.get_context().clear_cache() self.assertTrue(BAD_PUB_PRI_KEY is not FAKE_PUBLIC_KEY) self.assertTrue(BAD_PUB_PRI_KEY is not FAKE_PRIVATE_KEY) # I have to define the keys here once the module is loaded and # stubbed so that the call to ndb.Key is the stubbed version. global FAKE_KEY, FAKE_KEY_URLSAFE, FAKE_USER # noqa FAKE_KEY = ndb.Key(datastore.User, FAKE_EMAIL) FAKE_KEY_URLSAFE = FAKE_KEY.urlsafe() FAKE_USER = datastore.User(key=FAKE_KEY, email=FAKE_EMAIL, name=FAKE_NAME, public_key=FAKE_PUBLIC_KEY, private_key=FAKE_PRIVATE_KEY, is_key_revoked=False) global BAD_KEY, BAD_KEY_URLSAFE, USER_BAD_KEY # noqa BAD_KEY = ndb.Key(datastore.User, BAD_EMAIL) BAD_KEY_URLSAFE = BAD_KEY.urlsafe() USER_BAD_KEY = datastore.User(key=BAD_KEY, email=BAD_EMAIL, name=FAKE_NAME, public_key=BAD_PUB_PRI_KEY, private_key=BAD_PUB_PRI_KEY, is_key_revoked=False)
def poll_shout_status(browser_id, shout_id, last_status):
    """Poll datastore waiting for the shout task to complete.

    Why not return the status immediately?  That would work too, but then the
    browser would be constantly sending new HTTP requests to check whether its
    task is done.  That would consume the users' bandwidth and battery life.

    Why not poll until the task is complete?  Why time out after 45 seconds?
    Because App Engine will terminate any HTTP request that doesn't complete in
    60 seconds.  We have to return something before that deadline.

    Args:
        browser_id: string, identifies the browser that submitted the shout.
        shout_id: string, identifies the shout task being polled.
        last_status: string, the last status observed by the user.

    Returns:
        A flask http response.
    """
    response = {'shoutId': shout_id, 'status': last_status}
    start_timestamp = time.time()
    ndb.get_context().set_cache_policy(False)
    sleep_seconds = 0.1
    while True:
        # Look up the current status in datastore.
        q = (ShoutStatusLog.query()
             .filter(ShoutStatusLog.combined_shout_id == combine_ids(
                 browser_id, shout_id))
             .order(-ShoutStatusLog.status))
        entities = q.fetch(1)
        if entities:
            entity = entities[0]
            status = response['status'] = entity.status_name
            if status == 'success':
                response['result'] = entity.result
                return json.dumps(response)
            if status == 'fatal':
                response['error'] = entity.error
                return json.dumps(response)
            if last_status != status:
                # State changed, notify user.
                response['error'] = entity.error
                break
        # Retry with exponential backoff, for a maximum of 45 seconds.
        if time.time() - start_timestamp >= 45:
            break
        else:
            time.sleep(sleep_seconds)  # Retry after small wait.
            sleep_seconds = min(5, sleep_seconds * 2)

    response['nextLink'] = {
        'target': 'shout_status',
        'method': 'POST',
        'token': werkzeug.urls.url_encode({
            'browserId': browser_id,
            'shoutId': shout_id,
            'status': response['status']
        })}
    return json.dumps(response), 202
def setUp(self):
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.testbed.init_datastore_v3_stub()
    self.testbed.init_memcache_stub()
    ndb.get_context().clear_cache()

    self.initData()
def setUp(self):
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.testbed.init_urlfetch_stub()
    self.testbed.init_datastore_v3_stub()
    self.testbed.init_memcache_stub()
    ndb.get_context().clear_cache()  # Prevent data from leaking between tests

    self.event = Event(
        id="2014casj",
        event_short="casj",
        event_type_enum=EventType.REGIONAL,
        name="Silicon Valley Regional",
        start_date=datetime.datetime(2014, 2, 27, 0, 0),
        end_date=datetime.datetime(2014, 3, 1, 0, 0),
        year=2014,
        timezone_id="America/New_York",
    )

    self.event_dst = Event(
        id="2014casj",
        event_short="casj",
        event_type_enum=EventType.REGIONAL,
        name="Silicon Valley Regional",
        start_date=datetime.datetime(2014, 3, 8, 0, 0),
        end_date=datetime.datetime(2014, 3, 9, 0, 0),  # chosen to span DST change
        year=2014,
        timezone_id="America/Los_Angeles",
    )
def setUp(self):
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.testbed.init_urlfetch_stub()
    self.testbed.init_datastore_v3_stub()
    self.testbed.init_memcache_stub()
    ndb.get_context().clear_cache()  # Prevent data from leaking between tests
def setUp(self):
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.testbed.init_datastore_v3_stub()
    self.testbed.init_memcache_stub()
    ndb.get_context().clear_cache()  # Prevent data from leaking between tests

    self.account = Account.get_or_insert(
        "123",
        email="*****@*****.**",
        registered=True)
    self.account.put()

    self.account_banned = Account.get_or_insert(
        "456",
        email="*****@*****.**",
        registered=True,
        shadow_banned=True,
    )
    self.account_banned.put()

    event = Event(
        id="2016test",
        name="Test Event",
        event_short="Test Event",
        year=2016,
        event_type_enum=EventType.OFFSEASON)
    event.put()

    self.match = Match(
        id="2016test_f1m1",
        event=ndb.Key(Event, "2016test"),
        year=2016,
        comp_level="f",
        set_number=1,
        match_number=1,
        alliances_json='')
    self.match.put()
def setUp(self):
    self.testbed = testbed.Testbed()
    # Then activate the testbed, which prepares the service stubs for use.
    self.testbed.activate()
    # Next, declare which service stubs you want to use.
    self.testbed.init_datastore_v3_stub()
    self.testbed.init_memcache_stub()
    # Clear ndb's in-context cache between tests.
    # This prevents data from leaking between tests.
    # Alternatively, you could disable caching by
    # using ndb.get_context().set_cache_policy(False)
    ndb.get_context().clear_cache()

    # requires_permission._DEBUG = True

    d = {'realm': 'google',
         'email': '*****@*****.**',
         'token': '6666',
         'user_id': 'id',
         'name_first': 'first',
         'name_last': 'last',
         'roles': [Admin]}
    self.u = duser.create(**d)
def setUp(self):
    # Create an instance of the Testbed class.
    self.testbed = testbed.Testbed()
    # Then activate the testbed, which prepares the service stubs for use.
    self.testbed.activate()

    # Create a consistency policy that will simulate the High Replication consistency model.
    self.policy = datastore_stub_util.PseudoRandomHRConsistencyPolicy(probability=1)
    self.testbed.init_datastore_v3_stub(
        consistency_policy=self.policy,
        # Set require_indexes to False to automatically add indexes to index.yaml.
        # NOTE: root_path must also be set.
        require_indexes=False,
        root_path=_parentDir
    )

    # Declare the other service stubs.
    self.testbed.init_memcache_stub()
    self.testbed.init_user_stub()
    self.testbed.init_taskqueue_stub()
    self.taskqueue_stub = self.testbed.get_stub(testbed.TASKQUEUE_SERVICE_NAME)
    self.testbed.init_mail_stub()

    # Clear ndb's in-context cache between tests.
    # This prevents data from leaking between tests.
    # Alternatively, you could disable caching by
    # using ndb.get_context().set_cache_policy(False)
    ndb.get_context().clear_cache()
def get_and_store_all_new_messages_async(user_id='me', query='', page_token=None):
    ndb.get_context().clear_cache()
    gmail_service = build_gmail_service_for_user(user_id)
    response = gmail_service.get_page_of_messages(user_id, query, page_token)
    all_messages_from_page = response['messages']
    next_page_token = response.get("nextPageToken")
    new_messages = gmail_service.get_new_messages_from_messages(user_id, all_messages_from_page)

    if not page_token and new_messages and len(new_messages) > 0:
        # This is the start of the job; start the stats counter and log.
        logging.debug("Starting incremental fetch job for user %s", User.get_user_email_from_id(user_id))

    if not new_messages or len(new_messages) == 0:
        logging.debug("Did not see any more new messages for user: %s", User.get_user_email_from_id(user_id))
        cached_stats = IncrementalJob.query(IncrementalJob.user_id == user_id).get()
        if cached_stats:
            cached_stats.started = False
            cached_stats.put()
        return

    gmail_service.fetch_messages(user_id, new_messages)
    # store_messages(user_id, new_message_contents)

    if next_page_token:
        try:
            logging.debug("Enqueuing next page for %s with new page token: %s", user_id, next_page_token)
            deferred.defer(get_and_store_all_new_messages_async, user_id, query, next_page_token, _queue="emailFetch")
        except DuplicateTaskNameError:
            logging.error("Duplicate task name exception")
    else:
        # This is the end of the job; stop the stats counter.
        cached_stats = IncrementalJob.query(IncrementalJob.user_id == user_id).get()
        cached_stats.started = False
        cached_stats.put()
        logging.debug("Ending incremental fetch job for user %s", User.get_user_email_from_id(user_id))
def setUp(self):
    print '\nSetting up test environment for: ' + self.__str__().split(' ', 1)[0]
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.testbed.init_datastore_v3_stub()
    self.testbed.init_memcache_stub()
    ndb.get_context().clear_cache()
def setUp(self):
    self.testbed = testbed.Testbed()
    # Then activate the testbed, which prepares the service stubs for use.
    self.testbed.activate()
    # Next, declare which service stubs you want to use.
    self.testbed.init_datastore_v3_stub()
    # Enable memcache.
    self.testbed.init_memcache_stub()
    self.testbed.init_user_stub(enable=True)
    self.testbed.init_search_stub(enable=True)
    self.testbed.setup_env(
        USER_EMAIL='*****@*****.**',
        USER_ID='123',
        USER_IS_ADMIN='1',
        overwrite=True)
    # Clear ndb's in-context cache between tests.
    # This prevents data from leaking between tests.
    # Alternatively, you could disable caching by
    # using ndb.get_context().set_cache_policy(False)
    ndb.get_context().clear_cache()

    self.tags = Tags()
    self.categories = Categories()
    self.posts = Posts()
    self.form = PostForm()
    self.uploadform = UploadForm()
def setUp(self):
    app = webapp2.WSGIApplication([webapp2.Route(r'/<team_key:>', ApiTeamController, methods=['GET'])], debug=True)
    self.testapp = webtest.TestApp(app)

    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.testbed.init_datastore_v3_stub()
    self.testbed.init_urlfetch_stub()
    self.testbed.init_memcache_stub()
    ndb.get_context().clear_cache()  # Prevent data from leaking between tests
    self.testbed.init_taskqueue_stub(root_path=".")

    self.team = Team(
        id="frc281",
        name="Michelin / Caterpillar / Greenville Technical College /\
jcpenney / Baldor / ASME / Gastroenterology Associates /\
Laserflex South & Greenville County Schools & Greenville\
Technical Charter High School",
        team_number=281,
        rookie_year=1999,
        nickname="EnTech GreenVillians",
        city="Greenville",
        state_prov="SC",
        country="USA",
        website="www.entech.org",
        motto="Infiltrating Young Minds One Robot at a Time"
    )
    self.team.put()
def setUp(self):
    self.policy = datastore_stub_util.PseudoRandomHRConsistencyPolicy(
        probability=1)
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.testbed.init_datastore_v3_stub(consistency_policy=self.policy)
    self.testbed.init_memcache_stub()
    self.testbed.init_user_stub()
    self.testbed.init_urlfetch_stub()
    self.testbed.init_taskqueue_stub(_all_queues_valid=True)
    ndb.get_context().clear_cache()  # Prevent data from leaking between tests

    app = webapp2.WSGIApplication([
        RedirectRoute(
            r'/mod/redeem', TeamAdminRedeem, 'team-admin', strict_slash=True),
    ])
    self.testapp = webtest.TestApp(app)

    self.team = Team(
        id="frc1124",
        name="Team",
        team_number=1124,
    )
    self.team.put()
    self.now = datetime.datetime.now()
def setUp(self):
    app = webapp2.WSGIApplication([webapp2.Route(r'/<team_key:>', ApiTeamHistoryDistrictsController, methods=['GET'])], debug=True)
    self.testapp = webtest.TestApp(app)

    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.testbed.init_datastore_v3_stub()
    self.testbed.init_urlfetch_stub()
    self.testbed.init_memcache_stub()
    ndb.get_context().clear_cache()  # Prevent data from leaking between tests
    self.testbed.init_taskqueue_stub(root_path=".")

    self.team = Team(
        id="frc1124",
        name="UberBots",
        team_number=1124,
        nickname="UberBots"
    )

    self.district_team = DistrictTeam(
        id="2015ne_frc1124",
        team=self.team.key,
        year=2015,
        district_key=ndb.Key(District, '2015ne')
    )

    self.district = District(
        id='2015ne',
        year=2015
    )

    self.team.put()
    self.district_team.put()
    self.district.put()
def setUp(self):
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.testbed.init_datastore_v3_stub()
    self.testbed.init_memcache_stub()
    ndb.get_context().clear_cache()  # Prevent data from leaking between tests

    self.event_nyny = Event(
        id="2016nyny",
        name="NYC Regional",
        event_type_enum=EventType.REGIONAL,
        short_name="NYC",
        event_short="nyny",
        year=2016,
        end_date=datetime(2016, 03, 27),
        official=True,
        start_date=datetime(2016, 03, 24),
        timezone_id="America/New_York"
    )
    self.event_nyny.put()

    self.event_micmp = Event(
        id="2016micmp",
        name="Michigan District Champs",
        event_type_enum=EventType.DISTRICT_CMP,
        short_name="Michigan",
        event_short="micmp",
        year=2016,
        end_date=datetime(2016, 03, 27),
        official=True,
        start_date=datetime(2016, 03, 24),
        timezone_id="America/New_York",
        playoff_type=PlayoffType.BRACKET_16_TEAM
    )
    self.event_micmp.put()
def post(self):
    request_data = json.loads(self.request.body)
    logging.info(request_data)
    player = current_user_player()

    # VALIDATION
    if not validate_request_data(self.response, request_data, ['type']):
        return

    if player.doing:
        error_400(self.response, "ERROR_PLAYER_BUSY", "Player is busy.")
        return

    # JOIN QUEUE
    match_queue_key = MatchSoloQueue(
        player=player.key,
        type=request_data['type']
    ).put()

    player.doing = match_queue_key
    player.put()

    # If the cache is not cleared, the following count of queued players
    # won't include this newly queued player.
    ndb.get_context().clear_cache()

    match_queue = match_queue_key.get()

    self._notify_players_new_queue_size(match_queue.type)

    set_json_response(self.response, {'doing': match_queue.get_data()})
def setUp(self):
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.testbed.init_datastore_v3_stub()
    self.testbed.init_memcache_stub()
    ndb.get_context().clear_cache()  # Prevent data from leaking between tests

    Sitevar(id='website_blacklist', values_json=json.dumps({'websites': ['http://blacklist.com/']})).put()
def setUp(self):
    root = dirname('..')
    self.policy = datastore_stub_util.PseudoRandomHRConsistencyPolicy(probability=1.0)
    self.app = app
    self.testapp = TestApp(self.app)
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.testbed.init_taskqueue_stub(root_path=root)
    self.testbed.init_memcache_stub()
    self.testbed.init_datastore_v3_stub(root_path=root, consistency_policy=self.policy)
    self.testbed.init_user_stub()
    self.blob_storage = file_blob_storage.FileBlobStorage('/tmp/testbed.blobstore', testbed.DEFAULT_APP_ID)
    self.testbed._register_stub('blobstore', blobstore_stub.BlobstoreServiceStub(self.blob_storage))
    self.testbed._register_stub('file', file_service_stub.FileServiceStub(self.blob_storage))
    try:
        from google.appengine.api.images import images_stub
        self.testbed._register_stub('images', images_stub.ImagesServiceStub())
    except:
        pass
    if self.CUSTOM_URLFETCH:
        self._url_fetch_mock = URLFetchServiceMock()
        apiproxy_stub_map.apiproxy.RegisterStub('urlfetch', self._url_fetch_mock)
    else:
        self._url_fetch_mock = None
        self.testbed.init_urlfetch_stub()
    self.taskqueue_stub = self.testbed.get_stub(testbed.TASKQUEUE_SERVICE_NAME)
    ndb.get_context().set_cache_policy(lambda key: False)
    mixpanel.DONT_FLUSH_QUEUE = True
    mixpanel.clear()
def setUp(self):
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.testbed.init_datastore_v3_stub(User)
    self.testbed.init_memcache_stub()
    ndb.get_context().clear_cache()
    self.mydb = DAL()
    self.obj = SearchEventsUsingAPI()
def setUp(self):
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.testbed.init_memcache_stub()
    self.testbed.init_datastore_v3_stub()
    self.testbed.init_mail_stub()
    self.mail_stub = self.testbed.get_stub(testbed.MAIL_SERVICE_NAME)
    ndb.get_context().clear_cache()
def setUp(self):
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.testbed.init_datastore_v3_stub()
    self.testbed.init_memcache_stub()
    ndb.get_context().clear_cache()  # Prevent data from leaking between tests
    self.testbed.init_taskqueue_stub(root_path=".")
def setUp(self):
    self.event = Event(id="2013test", event_short="test", year=2013)

    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.testbed.init_datastore_v3_stub()
    self.testbed.init_memcache_stub()
    ndb.get_context().clear_cache()  # Prevent data from leaking between tests
def get(url):
    context = ndb.get_context()
    headers = {
        'accept-encoding': 'gzip, *',
        'x-goog-api-version': '2',
    }

    url_result = urlparse.urlparse(url)
    if url_result.netloc.endswith('.googleapis.com'):
        auth_token, _ = app_identity.get_access_token(
            'https://www.googleapis.com/auth/cloud-platform')
        if auth_token:
            headers['Authorization'] = 'OAuth %s' % auth_token

    for retry in xrange(6):
        result = yield context.urlfetch(url, headers=headers)
        status = result.status_code
        if status == 429 or 500 <= status < 600:
            yield ndb.sleep(2**retry)
            continue
        if status in (200, 206):
            content = result.content
            if result.headers.get('content-encoding') == 'gzip':
                dec = zlib.decompressobj(15 | 16)
                content = dec.decompress(result.content, MAX_SIZE)
                if dec.unconsumed_tail:
                    logging.warning(
                        'only decompressed %d KB, %d KB remain in buffer.',
                        len(content) / 1024,
                        len(dec.unconsumed_tail) / 1024)
            raise ndb.Return(content)
        logging.error("unable to fetch '%s': status code %d", url, status)
        raise ndb.Return(None)
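Since this generator yields NDB futures and finishes with ndb.Return, it is presumably decorated with @ndb.tasklet in its original module. Under that assumption, a caller could drive it synchronously; the URL below is made up for illustration:

# A minimal usage sketch, assuming get() is wrapped with @ndb.tasklet.
content_future = get('https://storage.googleapis.com/example-bucket/example-object')
content = content_future.get_result()  # Blocks until the tasklet completes.
if content is None:
    logging.error('fetch failed after retries')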
def week(self): """ Returns the week of the event relative to the first official season event as an integer Returns None if the event is not of type NON_CMP_EVENT_TYPES or is not official """ if self.event_type_enum not in EventType.NON_CMP_EVENT_TYPES or not self.official: return None # Cache week_start for the same context cache_key = '{}_week_start:{}'.format(self.year, ndb.get_context().__hash__()) week_start = context_cache.get(cache_key) if week_start is None: e = Event.query( Event.year == self.year, Event.event_type_enum.IN(EventType.NON_CMP_EVENT_TYPES), Event.start_date != None).order(Event.start_date).fetch( 1, projection=[Event.start_date]) if e: first_start_date = e[0].start_date diff_from_wed = (first_start_date.weekday() - 2) % 7 # 2 is Wednesday week_start = first_start_date - datetime.timedelta( days=diff_from_wed) else: week_start = None context_cache.set(cache_key, week_start) if self._week is None and week_start is not None: days = (self.start_date - week_start).days self._week = days / 7 return self._week
def testNoToken(self):
    api = rest_api._RestApi('scope')
    self.assertEqual(api.scopes, ['scope'])

    fut_get_token = ndb.Future()
    fut_get_token.set_result(None)
    api.get_token_async = mock.create_autospec(api.get_token_async,
                                               return_value=fut_get_token)

    fut_urlfetch = ndb.Future()
    fut_urlfetch.set_result(
        test_utils.MockUrlFetchResult(200, {'foo': 'bar'}, 'yoohoo'))
    ctx_urlfetch = mock.Mock(return_value=fut_urlfetch)
    ndb.get_context().urlfetch = ctx_urlfetch

    res = api.do_request('http://example.com')

    self.assertEqual(res, (200, {'foo': 'bar'}, 'yoohoo'))
    ctx_urlfetch.assert_called_once_with(
        'http://example.com',
        headers={'User-Agent': 'AppEngine-Python-GCS'},
        follow_redirects=False,
        payload=None,
        method='GET',
        deadline=None,
        callback=None)
def testTokenSaved(self):
    retry_params = api_utils.RetryParams(save_access_token=True)
    api = rest_api._RestApi('scope', retry_params=retry_params)
    t1 = api.get_token()
    self.assertNotEqual(None, t1)

    api = rest_api._RestApi('scope', retry_params=retry_params)
    t2 = api.get_token()
    self.assertEqual(t2, t1)

    memcache.flush_all()
    ndb.get_context().clear_cache()

    api = rest_api._RestApi('scope', retry_params=retry_params)
    t3 = api.get_token()
    self.assertEqual(t3, t1)
def testBasicCallWithUserAgent(self):
    user_agent = 'Test User Agent String'
    retry_params = api_utils.RetryParams(_user_agent=user_agent)
    api = rest_api._RestApi('scope', retry_params=retry_params)
    self.assertEqual(api.scopes, ['scope'])

    fut_get_token = ndb.Future()
    fut_get_token.set_result('blah')
    api.get_token_async = mock.create_autospec(api.get_token_async,
                                               return_value=fut_get_token)

    fut_urlfetch = ndb.Future()
    fut_urlfetch.set_result(
        test_utils.MockUrlFetchResult(200, {'foo': 'bar'}, 'yoohoo'))
    ctx_urlfetch = mock.Mock(return_value=fut_urlfetch)
    ndb.get_context().urlfetch = ctx_urlfetch

    res = api.do_request('http://example.com')

    self.assertEqual(res, (200, {'foo': 'bar'}, 'yoohoo'))
    ctx_urlfetch.assert_called_once_with(
        'http://example.com',
        headers={'authorization': 'OAuth blah',
                 'User-Agent': user_agent},
        follow_redirects=False,
        payload=None,
        method='GET',
        deadline=None,
        callback=None)
def setUp(self):
    # First, create an instance of the Testbed class.
    self.testbed = testbed.Testbed()
    # Then activate the testbed, which prepares the service stubs for use.
    self.testbed.activate()
    # Next, declare which service stubs you want to use.
    self.testbed.init_user_stub()
    self.testbed.init_datastore_v3_stub()
    self.testbed.init_memcache_stub()
    self.testbed.init_user_stub()
    self.testbed.init_taskqueue_stub()
    # Clear ndb's in-context cache between tests.
    # This prevents data from leaking between tests.
    # Alternatively, you could disable caching by
    # using ndb.get_context().set_cache_policy(False)
    ndb.get_context().clear_cache()
def setUp(self):
    environment.TEST = True
    lru_clear_all()
    if testbed:
        self.testbed = testbed.Testbed()
        self.testbed.setup_env(current_version_id='testbed.version')
        self.testbed.activate()

        # init stubs
        self.testbed.init_logservice_stub()
        self.testbed.init_memcache_stub()
        self.testbed.init_app_identity_stub()
        self.policy = datastore_stub_util.PseudoRandomHRConsistencyPolicy(probability=1)
        self.testbed.init_datastore_v3_stub(consistency_policy=self.policy)
        self.testbed.init_blobstore_stub()
        self.testbed.init_mail_stub()

        # get stubs
        self.task_queue_stub = self.testbed.get_stub(testbed.TASKQUEUE_SERVICE_NAME)
        self.mail_stub = self.testbed.get_stub(testbed.MAIL_SERVICE_NAME)

        # disable ndb cache
        context = ndb.get_context()
        context.set_cache_policy(lambda key: False)
        context.set_memcache_policy(lambda key: False)

    # api mock
    self.api_mock = ApiMock()
def _rate_limiter(self):
    """
    Rate limiting for bots, to save resources.

    The reason we use 503 is that webapp2 does not include 429 in its
    supported status codes, since 429 is not an official status code yet.
    """
    user_agent = self.request.headers.get('User-Agent', 'Googlebot')
    robot = filter(lambda bot: user_agent.find(bot) != -1, BOT_USER_AGENTS)
    if robot:
        # use current minute cache
        ctx = ndb.get_context()
        cache_id = 'rate_limiter_{}'.format(
            hashlib.md5(robot.pop()).hexdigest())
        request_count = ctx.memcache_incr(cache_id, initial_value=0).get_result()
        if request_count >= config.rate_limit[0]:
            self.abort(503)
        elif request_count == 1:
            ctx.memcache_set(cache_id, request_count, config.rate_limit[1])
    else:
        # rate limiters for non-bot, non-logged-in users
        request_count, time_started = self.session.get(
            'rate_limiter_request', (0, datetime.now()))
        request_count += 1
        seconds = (
            datetime.now() - time_started).seconds if datetime.now() > time_started else 0
        if seconds > config.rate_limit[1]:
            request_count, time_started = (0, datetime.now())
        elif request_count >= config.rate_limit[0] and seconds < config.rate_limit[1]:
            self.abort(503)
        self.session['rate_limiter_request'] = (request_count, time_started)
def setUp(self):
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.testbed.init_datastore_v3_stub()
    self.testbed.init_memcache_stub()
    ndb.get_context().clear_cache()  # Prevent data from leaking between tests
    self.testbed.init_taskqueue_stub(root_path=".")

    for team_number in range(6):
        Team(id="frc%s" % team_number,
             team_number=team_number).put()

    self.event = EventTestCreator.createPresentEvent()
    self.match = self.event.matches[0]

    self.notification = MatchVideoNotification(self.match)
def send_to_api(self, path, post, access_token):
    ctx = ndb.get_context()
    try:
        resp = yield ctx.urlfetch('https://alpha-api.app.net/stream/0/%s' % path,
                                  payload=json.dumps(post),
                                  deadline=30,
                                  method='POST',
                                  headers={
                                      'Authorization': 'Bearer %s' % access_token,
                                      'Content-Type': 'application/json',
                                  })
    except:
        logger.exception('Failed to post path: %s data: %s' % (path, post))
        raise deferred.SingularTaskFailure()

    parsed_resp = json.loads(resp.content)
    if resp.status_code == 401:
        logger.info('unauthorized')
        yield self.handle_unauthorized(parsed_resp, post)
        raise deferred.PermanentTaskFailure()
    elif resp.status_code == 200:
        self.handle_success(parsed_resp, post)
    elif resp.status_code == 400:
        yield self.handle_bad_response(parsed_resp, post)
        raise deferred.PermanentTaskFailure()
    elif resp.status_code == 403:
        yield self.handle_forbidden(parsed_resp, post)
        raise deferred.PermanentTaskFailure()
    else:
        logger.warn("Couldn't post entry key=%s. Error: %s Post:%s",
                    self.entry_key, parsed_resp, post)
        raise deferred.SingularTaskFailure()
def _get_pending_auth_db_transaction():
    """Used internally to keep track of changes done in the transaction.

    Returns:
        Instance of _AuthDBTransaction (stored in the transaction context).
    """
    # Use transaction context to store the object. Note that each transaction
    # retry gets its own new transaction context which is what we need,
    # see ndb/context.py, 'transaction' tasklet, around line 982 (for SDK 1.9.6).
    assert ndb.in_transaction()
    ctx = ndb.get_context()
    txn = getattr(ctx, '_auth_db_transaction', None)
    if txn:
        return txn

    # Prepare next AuthReplicationState (auth_db_rev +1).
    state = replication_state_key().get()
    if not state:
        primary_id = app_identity.get_application_id() if is_primary() else None
        state = AuthReplicationState(
            key=replication_state_key(),
            primary_id=primary_id,
            auth_db_rev=0)
    # Assert Primary or Standalone. Replicas can't increment auth db revision.
    if not is_primary() and state.primary_id:
        raise ValueError('Can\'t modify Auth DB on Replica')
    state.auth_db_rev += 1
    state.modified_ts = utils.utcnow()

    # Store the state in the transaction context. Used in replicate_auth_db(...)
    # later.
    txn = _AuthDBTransaction(state)
    ctx._auth_db_transaction = txn
    return txn
def setUp(self):
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.testbed.init_datastore_v3_stub()
    self.testbed.init_memcache_stub()
    self.testbed.init_user_stub()
    ndb.get_context().clear_cache()  # Prevent data from leaking between tests

    app = webapp2.WSGIApplication([
        RedirectRoute(r'/suggest/team/social_media', SuggestTeamSocialMediaController, 'suggest-team-social-media', strict_slash=True),
    ], debug=True)
    self.testapp = webtest.TestApp(app)
def setUp(self):
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.testbed.init_datastore_v3_stub()
    self.testbed.init_memcache_stub()
    ndb.get_context().clear_cache()  # Prevent data from leaking between tests
    self.testbed.init_taskqueue_stub(root_path=".")

    self.event = Event(
        id="2012ct",
        event_short="ct",
        year=2012
    )

    self.old_match = Match(
        id="2012ct_qm1",
        alliances_json="""{"blue": {"score": -1, "teams": ["frc3464", "frc20", "frc1073"]}, "red": {"score": -1, "teams": ["frc69", "frc571", "frc176"]}}""",
        score_breakdown_json=json.dumps({
            'red': {'auto': 20, 'assist': 40, 'truss+catch': 20, 'teleop_goal+foul': 20},
            'blue': {'auto': 40, 'assist': 60, 'truss+catch': 10, 'teleop_goal+foul': 40},
        }),
        comp_level="qm",
        event=self.event.key,
        year=2012,
        set_number=1,
        match_number=1,
        team_key_names=[u'frc69', u'frc571', u'frc176', u'frc3464', u'frc20', u'frc1073'],
        youtube_videos=[u'P3C2BOtL7e8', u'tst1', u'tst2', u'tst3']
    )

    self.new_match = Match(
        id="2012ct_qm1",
        alliances_json="""{"blue": {"score": 57, "teams": ["frc3464", "frc20", "frc1073"]}, "red": {"score": 74, "teams": ["frc69", "frc571", "frc176"]}}""",
        score_breakdown_json=json.dumps({
            'red': {'auto': 80, 'assist': 40, 'truss+catch': 20, 'teleop_goal+foul': 20},
            'blue': {'auto': 40, 'assist': 60, 'truss+catch': 10, 'teleop_goal+foul': 40},
        }),
        comp_level="qm",
        event=self.event.key,
        year=2012,
        set_number=1,
        match_number=1,
        team_key_names=[u'frc69', u'frc571', u'frc176', u'frc3464', u'frc20', u'frc1073'],
        youtube_videos=[u'TqY324xLU4s', u'tst1', u'tst3', u'tst4']
    )
def setUp(self):
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.testbed.init_datastore_v3_stub()
    self.testbed.init_memcache_stub()
    self.testbed.init_user_stub()
    ndb.get_context().clear_cache()  # Prevent data from leaking between tests

    app = webapp2.WSGIApplication([
        RedirectRoute(r'/suggest/offseason', SuggestOffseasonEventController, 'request-apiwrite', strict_slash=True),
    ], debug=True)
    self.testapp = webtest.TestApp(app)
def get(self):
    ndb.get_context().set_cache_policy(lambda _: False)
    if not config.settings().mp.enabled:
        logging.info('MP support is disabled')
        return

    if config.settings().mp.server:
        new_server = config.settings().mp.server
        current_config = machine_provider.MachineProviderConfiguration().cached()
        if new_server != current_config.instance_url:
            logging.info('Updating Machine Provider server to %s', new_server)
            current_config.modify(updated_by='', instance_url=new_server)

    lease_management.ensure_entities_exist()
    lease_management.drain_excess()
def setUp(self):
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.testbed.init_datastore_v3_stub()
    self.testbed.init_memcache_stub()
    ndb.get_context().clear_cache()  # Prevent data from leaking between tests

    self.account = Account.get_or_insert(
        "123",
        email="*****@*****.**",
        registered=True)
    self.account.put()

    event = Event(
        id="2016test",
        name="Test Event",
        event_short="Test Event",
        year=2016,
        event_type_enum=EventType.OFFSEASON)
    event.put()

    self.match = Match(
        id="2016test_f1m1",
        event=ndb.Key(Event, "2016test"),
        year=2016,
        comp_level="f",
        set_number=1,
        match_number=1,
        alliances_json='')
    self.match.put()
def setUp(self):
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.testbed.init_datastore_v3_stub()
    self.testbed.init_taskqueue_stub(root_path=".")
    self.testbed.init_memcache_stub()
    ndb.get_context().clear_cache()  # Prevent data from leaking between tests

    load_fixture('test_data/2016cama_no_surrogate.json',
                 kind={
                     'Event': Event,
                     'Match': Match
                 },
                 post_processor=self.eventKeyAdder)

    self.event = Event.get_by_id('2016cama')
    self.assertIsNotNone(self.event)
def setUp(self):
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.testbed.init_datastore_v3_stub()
    self.testbed.init_memcache_stub()
    ndb.get_context().clear_cache()  # Prevent data from leaking between tests

    account = Account.get_or_insert(
        "123",
        email="*****@*****.**",
        registered=True).put()

    Suggestion(
        author=account,
        review_state=Suggestion.REVIEW_PENDING,
        target_key="2012cmp",
        target_model="event").put()
def setUp(self):
    app = webapp2.WSGIApplication([webapp2.Route(r'/<team_key:>/<year:>', ApiTeamMediaController, methods=['GET'])], debug=True)
    self.testapp = webtest.TestApp(app)

    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.testbed.init_datastore_v3_stub()
    self.testbed.init_urlfetch_stub()
    self.testbed.init_memcache_stub()
    ndb.get_context().clear_cache()  # Prevent data from leaking between tests
    self.testbed.init_taskqueue_stub(root_path=".")

    self.team = Team(
        id="frc254",
        name="very long name",
        team_number=254,
        nickname="Teh Chezy Pofs",
        city="Greenville",
        state_prov="SC",
        country="USA"
    )
    self.team.put()

    self.cdmedia = Media(
        key=ndb.Key('Media', 'cdphotothread_39894'),
        details_json=u'{"image_partial": "fe3/fe38d320428adf4f51ac969efb3db32c_l.jpg"}',
        foreign_key=u'39894',
        media_type_enum=1,
        references=[ndb.Key('Team', 'frc254')],
        year=2014
    )
    self.cdmedia.put()
    self.cddetails = dict()
    self.cddetails["image_partial"] = "fe3/fe38d320428adf4f51ac969efb3db32c_l.jpg"

    self.ytmedia = Media(
        key=ndb.Key('Media', 'youtube_aFZy8iibMD0'),
        details_json=None,
        foreign_key=u'aFZy8iibMD0',
        media_type_enum=0,
        references=[ndb.Key('Team', 'frc254')],
        year=2014
    )
    self.ytmedia.put()
def setUp(self):
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.testbed.init_datastore_v3_stub()
    self.testbed.init_memcache_stub()
    ndb.get_context().clear_cache()

    router = Router()
    router.register('user', DynamicUserHandler)

    app = webapp2.WSGIApplication(router.urls)
    self.testapp = webtest.TestApp(app)

    self.entity = UserModel(email='*****@*****.**', first_name='admin',
                            last_name='restae', age=28)
    self.entity.put()
def setUp(self):
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.addCleanup(self.testbed.deactivate)
    self.testbed.init_datastore_v3_stub()
    self.testbed.init_mail_stub()
    self.testbed.init_memcache_stub()
    self.testbed.init_taskqueue_stub(root_path=_QUEUE_YAML_DIR)
    self.testbed.init_user_stub()
    self.testbed.init_urlfetch_stub()
    ndb.get_context().clear_cache()
    self.mail_stub = self.testbed.get_stub(testbed.MAIL_SERVICE_NAME)
    self.mock_get_request = None
    self._PatchIsInternalUser()
    datastore_hooks.InstallHooks()
def setUp(self):
    self.testbed = testbed.Testbed()
    # Then activate the testbed, which prepares the service stubs for use.
    self.testbed.activate()
    # Next, declare which service stubs you want to use.
    self.testbed.init_datastore_v3_stub()
    self.testbed.init_search_stub(enable=True)
    # Clear ndb's in-context cache between tests.
    # This prevents data from leaking between tests.
    # Alternatively, you could disable caching by
    # using ndb.get_context().set_cache_policy(False)
    ndb.get_context().clear_cache()

    self.document_id = u'54357456'
    self.index = search.Index(name=_INDEX_NAME)
def setUp(self):
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.testbed.init_all_stubs()
    ndb.get_context().clear_cache()

    sliver_tool_fetcher_datastore_patch = mock.patch.object(
        sliver_tool_fetcher, 'SliverToolFetcherDatastore', autospec=True)
    self.addCleanup(sliver_tool_fetcher_datastore_patch.stop)
    sliver_tool_fetcher_datastore_patch.start()

    sliver_tool_fetcher_memcache_patch = mock.patch.object(
        sliver_tool_fetcher, 'SliverToolFetcherMemcache', autospec=True)
    self.addCleanup(sliver_tool_fetcher_memcache_patch.stop)
    sliver_tool_fetcher_memcache_patch.start()

    self.fetcher = sliver_tool_fetcher.SliverToolFetcher()
def setUp(self):
    # First, create an instance of the Testbed class.
    self.testbed = testbed.Testbed()
    # Then activate the testbed, which prepares the service stubs for use.
    self.testbed.activate()
    # Next, declare which service stubs you want to use.
    self.testbed.init_datastore_v3_stub()
    self.testbed.init_memcache_stub()

    self.good_data = {
        "email": "*****@*****.**",
        "first_name": "Alex",
        "last_name": "Alex",
        "password": "******",
        "repeat_password": "******"
    }

    ndb.get_context().clear_cache()
    self.signu_up_endpoint = '/_ah/api/user/v1/signup'
def _import_subreddit_async(subreddit):
    context = ndb.get_context()
    try:
        result = yield context.urlfetch(
            method=urlfetch.GET,
            url='https://www.reddit.com/r/%s/hot.json?limit=50' % (subreddit,))
        data = json.loads(result.content)
        posts = data['data']['children']
    except urlfetch.ConnectionClosedError:
        logging.warning('Failed to connect to www.reddit.com')
        raise ndb.Return([])
    except Exception as e:
        logging.exception('Fetching /r/%s data failed.' % (subreddit,))
        raise ndb.Return([])

    futures = []
    for post in posts:
        post_id = post['data']['id']
        media = post['data']['media']
        if not media or 'oembed' not in media:
            logging.debug('Skipping %s.media: %r', post_id, media)
            continue
        info = media['oembed']
        provider = info['provider_name']
        if provider in ('BandCamp', 'Imgur', 'SoundCloud', 'Spotify'):
            logging.debug('Skipping %s.media for %s', post_id, provider)
            continue
        if provider == 'Streamable':
            m = re.search(r'&url=https%3A%2F%2Fstreamable\.com%2F([0-9a-z]+)&', info['html'])
            if not m:
                logging.debug('Failed to get Streamable id from %s.media %r', post_id, media)
                continue
            title = html_parser.unescape(post['data']['title'])
            url = 'https://streamable.com/%s' % (m.group(1),)
        elif provider == 'YouTube':
            m = re.search(r'https(?:://|%3A%2F%2F)www\.youtube\.com(?:/|%2F)embed(?:/|%2F)([a-zA-Z0-9_-]+)',
                          info['html'])
            if not m:
                logging.debug('Failed to get YouTube id from %s.media %r', post_id, media)
                continue
            title = html_parser.unescape(info['title'])
            url = 'https://www.youtube.com/watch?v=%s' % (m.group(1),)
        else:
            logging.debug('Skipping %s.media for %s: %r', post_id, provider, media)
            continue
        future = utils.get_or_create_content_async(
            url=url,
            thumb_url=info['thumbnail_url'],
            title=title,
            tags=['original', 'is suggestion'],
            allow_restricted_tags=True)
        futures.append(future)

    content_list = []
    for f in futures:
        try:
            content = yield f
        except:
            logging.exception('Failed to create content.')
        content_list.append(content)
    raise ndb.Return(content_list)
def setUp(self):
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.testbed.init_datastore_v3_stub()
    self.testbed.init_memcache_stub()
    ndb.get_context().clear_cache()  # Prevent data from leaking between tests

    self.event_nyny = Event(
        id="2016nyny",
        name="NYC Regional",
        event_type_enum=EventType.REGIONAL,
        short_name="NYC",
        event_short="nyny",
        year=2016,
        end_date=datetime(2016, 03, 27),
        official=True,
        start_date=datetime(2016, 03, 24),
        timezone_id="America/New_York")
    self.event_nyny.put()

    self.event_micmp = Event(
        id="2016micmp",
        name="Michigan District Champs",
        event_type_enum=EventType.DISTRICT_CMP,
        short_name="Michigan",
        event_short="micmp",
        year=2016,
        end_date=datetime(2016, 03, 27),
        official=True,
        start_date=datetime(2016, 03, 24),
        timezone_id="America/New_York",
        playoff_type=PlayoffType.BRACKET_16_TEAM)
    self.event_micmp.put()

    self.event_2018week0 = Event(
        id="2018week0",
        name="Week 0",
        event_type_enum=EventType.PRESEASON,
        short_name="Week 0",
        event_short="week0",
        year=2018,
        end_date=datetime(2018, 02, 17),
        official=True,
        start_date=datetime(2018, 02, 17),
        timezone_id="America/New_York",
        playoff_type=PlayoffType.BRACKET_8_TEAM)
    self.event_2018week0.put()
def setUp(self):
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.testbed.init_datastore_v3_stub()
    self.testbed.init_taskqueue_stub(root_path=".")
    self.testbed.init_memcache_stub()
    ndb.get_context().clear_cache()  # Prevent data from leaking between tests

    self.maxDiff = None

    self.old_event = Event(
        id="2011ct",
        end_date=datetime.datetime(2011, 4, 2, 0, 0),
        event_short="ct",
        event_type_enum=EventType.REGIONAL,
        district_key=None,
        first_eid="5561",
        name="Northeast Utilities FIRST Connecticut Regional",
        start_date=datetime.datetime(2011, 3, 31, 0, 0),
        year=2011,
        venue_address="Connecticut Convention Center\r\n100 Columbus Blvd\r\nHartford, CT 06103\r\nUSA",
        website="http://www.ctfirst.org/ctr",
    )

    self.new_event = Event(
        id="2011ct",
        end_date=datetime.datetime(2011, 4, 2, 0, 0),
        event_short="ct",
        event_type_enum=EventType.REGIONAL,
        district_key=None,
        first_eid="5561",
        name="Northeast Utilities FIRST Connecticut Regional",
        start_date=datetime.datetime(2011, 3, 31, 0, 0),
        year=2011,
        venue_address="Connecticut Convention Center\r\n100 Columbus Blvd\r\nHartford, CT 06103\r\nUSA",
        website="http://www.ctfirst.org/ctr",
        facebook_eid="7",
        webcast_json=json.dumps([{
            'type': 'ustream',
            'channel': 'foo'
        }]),
    )
def attempts_for_interval(begin, end):  # pragma: no cover
    finished_in_interval = Record.query().filter(
        Record.tags == TAG_STOP,
        Record.timestamp >= begin,
        Record.timestamp < end)
    finish_timestamps = {}
    for record in finished_in_interval:
        if all(i in record.fields for i in ('project', 'issue', 'patchset')):
            key = (
                record.fields['project'],
                record.fields['issue'],
                record.fields['patchset'],
            )
            finish_timestamps.setdefault(key, []).append(record.timestamp)

    for key in finish_timestamps:
        # NDB seems to cache records beyond the soft memory limit.
        # Force a cache clear between each patchset analysis run to avoid getting
        # terminated by Appengine.
        ndb.get_context().clear_cache()

        last_finish_timestamp = max(finish_timestamps[key])
        project, issue, patchset = key
        interval_query = Record.query().order(Record.timestamp).filter(
            Record.timestamp <= last_finish_timestamp,
            Record.tags == TAG_PROJECT % project,
            Record.tags == TAG_ISSUE % issue,
            Record.tags == TAG_PATCHSET % patchset)

        all_attempts = []
        interval_attempts = []
        attempt = None
        for record in interval_query:
            if TAG_START in record.tags:
                attempt = []
            if attempt != None:
                attempt.append(record)
            if TAG_STOP in record.tags:
                if record.timestamp >= begin:
                    interval_attempts.append(attempt)
                all_attempts.append(attempt)
                attempt = None

        if len(all_attempts) == 0:
            logging.warning(
                'No attempts found for %s issue %s patchset %s at %s' % (
                    project, issue, patchset, begin))
            continue

        yield project, issue, patchset, all_attempts, interval_attempts
def delete_async(self, namespace=None, context=None):
    context = context or ndb.get_context()
    result = yield [
        context.memcache_delete(self.get_key(i)) for i in range(self.shards)
    ]
    raise ndb.Return(all(result))
def replicate_auth_db():
    """Increments auth_db_rev by one.

    It is a signal that Auth DB should be replicated to Replicas. If called from
    inside a transaction, it inherits it and updates auth_db_rev only once (even
    if called multiple times during that transaction).

    Should only be called for services in Standalone or Primary modes. Will raise
    ValueError if called on Replica. When called for service in Standalone mode,
    will update auth_db_rev but won't kick any replication. For services in
    Primary mode will also initiate replication by calling callback set in
    'configure_as_primary'.

    WARNING: This function relies on a valid transaction context. NDB hooks and
    asynchronous operations are known to be buggy in this regard: NDB hook for
    an async operation in a transaction may be called with a wrong context
    (main event loop context instead of transaction context). One way to work
    around that is to monkey patch NDB (as done here: https://goo.gl/1yASjL).
    Another is to not use hooks at all. There's no way to differentiate between
    sync and async modes of an NDB operation from inside a hook. And without a
    strict assert it's very easy to forget about "Do not use put_async" warning.
    For that reason _post_put_hook is NOT used and replicate_auth_db() should be
    called explicitly whenever relevant part of root_key() entity group is
    updated.
    """
    def increment_revision_and_update_replicas():
        """Does the actual job, called inside a transaction."""
        # Update auth_db_rev. replication_state_key() is in same group as root_key.
        state = replication_state_key().get()
        if not state:
            primary_id = app_identity.get_application_id() if is_primary() else None
            state = AuthReplicationState(
                key=replication_state_key(),
                primary_id=primary_id,
                auth_db_rev=0)
        # Assert Primary or Standalone. Replicas can't increment auth db revision.
        if not is_primary() and state.primary_id:
            raise ValueError('Can\'t modify Auth DB on Replica')
        state.auth_db_rev += 1
        state.modified_ts = utils.utcnow()
        state.put()
        # Only Primary does active replication.
        if is_primary():
            _replication_callback(state)

    # If not in a transaction, start a new one.
    if not ndb.in_transaction():
        ndb.transaction(increment_revision_and_update_replicas)
        return

    # If in a transaction, use transaction context to store "already did this"
    # flag. Note that each transaction retry gets its own new transaction context,
    # see ndb/context.py, 'transaction' tasklet, around line 982 (for SDK 1.9.6).
    ctx = ndb.get_context()
    if not getattr(ctx, '_auth_db_inc_called', False):
        increment_revision_and_update_replicas()
        ctx._auth_db_inc_called = True
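The docstring above says replicate_auth_db() must be called explicitly in the same transaction that mutates the root_key() entity group. A minimal sketch of that pattern, with a hypothetical AuthGroup entity and update_group_description helper that are not part of the snippet:

# Hypothetical caller; assumes group_key points at an entity under root_key().
@ndb.transactional
def update_group_description(group_key, description):
    group = group_key.get()
    group.description = description
    group.put()
    # Bump auth_db_rev inside the same transaction so Replicas get the change.
    replicate_auth_db()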