def xxxNOT_WORKING_YETxxxtestUpdateDirtyWithResultTimeKey(self):
    # NOTE(review): the 'xxxNOT_WORKING_YETxxx' prefix keeps the test runner
    # from picking this up; the test is disabled until it is fixed.
    """Verify /admin/update_dirty with a result_time_key updates that
    ResultTime's stats and schedules a follow-up task for the next dirty one.
    """
    ua_string = ('Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.0.6) '
                 'Gecko/2009011912 Firefox/3.0.6')
    # skip_update_dirty=True so the handler under test does the updating,
    # not AddResult itself.
    result_parent = ResultParent.AddResult(
        self.test_set, '12.2.2.11', ua_string,
        'apple=0,banana=99,coconut=101', skip_update_dirty=True)
    skip_rt, update_rt, next_rt = result_parent.GetResultTimes()
    # Make it so there is only one ResultTime to schedule after first update.
    skip_rt.dirty = False
    skip_rt.put()
    # Record the exact calls the handler is expected to make.
    self.mox.StubOutWithMock(ResultTime, 'UpdateStats')
    self.mox.StubOutWithMock(ResultParent, 'ScheduleUpdateDirty')
    ResultTime.UpdateStats(update_rt)
    ResultParent.ScheduleUpdateDirty(next_rt.key())
    self.mox.ReplayAll()
    response = self.client.get('/admin/update_dirty/some_category',
                               {'result_time_key': update_rt.key()},
                               **mock_data.UNIT_TEST_UA)
    self.mox.VerifyAll()
def MakeDirty(request):
    """For testing purposes, make some tests dirty.

    Marks every ResultTime under (up to) the first 10 ResultParents as
    dirty and saves them in one batch put.

    Returns:
      An http.HttpResponse reporting how many ResultTimes were dirtied.
    """
    dirtied = []
    for parent in ResultParent.all().fetch(10):
        child_query = ResultTime.all().ancestor(parent)
        for result_time in child_query.fetch(1000):
            result_time.dirty = True
            dirtied.append(result_time)
    # Single batch write for all modified entities.
    db.put(dirtied)
    return http.HttpResponse('Made %s result_times dirty' % len(dirtied))
def MakeDirty(request):
    """For testing purposes, make some tests dirty.

    Flags all ResultTimes belonging to the first few ResultParents as
    dirty, persisting the changes with one batch put.

    Returns:
      An http.HttpResponse reporting the number of ResultTimes flagged.
    """
    updated = []
    parents = ResultParent.all().fetch(10)
    for result_parent in parents:
        times = ResultTime.all().ancestor(result_parent).fetch(1000)
        for result_time in times:
            result_time.dirty = True
        updated.extend(times)
    db.put(updated)  # batch write
    return http.HttpResponse('Made %s result_times dirty' % len(updated))
def UpdateDirty(request):
    """Updates any dirty tests, adding its score to the appropriate ranker.

    Request parameters (all optional):
      task_name_prefix: prefix forwarded to ScheduleUpdateDirty.
      result_time_key: a specific ResultTime to update now.
      result_parent_key: parent whose dirty ResultTimes should be chained.
      category: category name used for scheduling and the sharded counter.
      count: number of ResultTimes already handled on this task chain.

    Returns:
      An http.HttpResponse describing what was done.
    """
    logging.debug('UpdateDirty start.')
    task_name_prefix = request.REQUEST.get('task_name_prefix', '')
    result_time_key = request.REQUEST.get('result_time_key')
    category = request.REQUEST.get('category')
    count = int(request.REQUEST.get('count', 0))
    if result_time_key:
        result_time = ResultTime.get(result_time_key)
        try:
            ResultTime.UpdateStats(result_time)
        except Exception:
            # Best-effort: log and continue so one bad ResultTime does not
            # wedge the task chain.  FIX: was a bare "except:" reading the
            # long-deprecated sys.exc_type/sys.exc_value globals; use
            # sys.exc_info() and stop catching SystemExit/KeyboardInterrupt.
            exc_type, exc_value = sys.exc_info()[:2]
            logging.info('UpdateStats: %s:%s' % (exc_type, exc_value))
        result_parent_key = result_time.parent_key()
    else:
        result_parent_key = request.REQUEST.get('result_parent_key')
        if result_parent_key:
            result_parent_key = db.Key(result_parent_key)
        else:
            # No specific work given: sweep for stale dirty results instead.
            UpdateOldDirty()
            return http.HttpResponse('Done scheduling old results.')
    # Create a task for the next dirty ResultTime to update.
    dirty_query = ResultTime.all(keys_only=True)
    dirty_query.filter('dirty =', True)
    dirty_query.ancestor(result_parent_key)
    next_result_time_key = dirty_query.get()
    if next_result_time_key:
        logging.debug('Schedule next ResultTime: %s', next_result_time_key)
        ResultParent.ScheduleUpdateDirty(next_result_time_key, category,
                                         count + 1, task_name_prefix)
    else:
        # Whole parent is clean; roll its scores into the category stats.
        logging.debug('Done with result_parent: %s', result_parent_key)
        ScheduleCategoryUpdate(result_parent_key)
        shardedcounter.increment(category)
    return http.HttpResponse('Done.')
def UpdateDirty(request):
    """Updates any dirty tests, adding its score to the appropriate ranker.

    Request parameters (all optional):
      task_name_prefix: prefix forwarded to ScheduleUpdateDirty.
      result_time_key: a specific ResultTime to update now.
      result_parent_key: parent whose dirty ResultTimes should be chained.
      category: category name used for scheduling and the sharded counter.
      count: number of ResultTimes already handled on this task chain.

    Returns:
      An http.HttpResponse describing what was done.
    """
    logging.debug('UpdateDirty start.')
    task_name_prefix = request.REQUEST.get('task_name_prefix', '')
    result_time_key = request.REQUEST.get('result_time_key')
    category = request.REQUEST.get('category')
    count = int(request.REQUEST.get('count', 0))
    if result_time_key:
        result_time = ResultTime.get(result_time_key)
        try:
            ResultTime.UpdateStats(result_time)
        except Exception:
            # Deliberate best-effort: failures are logged, not fatal.
            # FIX: replaced bare "except:" and the deprecated
            # sys.exc_type/sys.exc_value globals with sys.exc_info(); the
            # bare form also caught SystemExit/KeyboardInterrupt.
            exc_type, exc_value = sys.exc_info()[:2]
            logging.info('UpdateStats: %s:%s' % (exc_type, exc_value))
        result_parent_key = result_time.parent_key()
    else:
        result_parent_key = request.REQUEST.get('result_parent_key')
        if result_parent_key:
            result_parent_key = db.Key(result_parent_key)
        else:
            # Nothing specific requested: go schedule old dirty results.
            UpdateOldDirty()
            return http.HttpResponse('Done scheduling old results.')
    # Create a task for the next dirty ResultTime to update.
    dirty_query = ResultTime.all(keys_only=True)
    dirty_query.filter('dirty =', True)
    dirty_query.ancestor(result_parent_key)
    next_result_time_key = dirty_query.get()
    if next_result_time_key:
        logging.debug('Schedule next ResultTime: %s', next_result_time_key)
        ResultParent.ScheduleUpdateDirty(
            next_result_time_key, category, count + 1, task_name_prefix)
    else:
        # This parent has no dirty children left; update category stats.
        logging.debug('Done with result_parent: %s', result_parent_key)
        ScheduleCategoryUpdate(result_parent_key)
        shardedcounter.increment(category)
    return http.HttpResponse('Done.')
def UpdateOldDirty():
    """Update dirty queries from the past.

    Scans up to 500 dirty ResultTime keys, and for each distinct parent
    whose result is older than OLD_SECONDS (or more than a day old),
    schedules an update task.  Stops after scheduling 10 parents.
    """
    scheduled = 0
    seen_parents = set()
    key_query = ResultTime.all(keys_only=True).filter('dirty =', True)
    for index, time_key in enumerate(key_query.fetch(500)):
        parent_key = time_key.parent()
        if parent_key in seen_parents:
            # Only one task per ResultParent.
            continue
        seen_parents.add(parent_key)
        parent = ResultParent.get(parent_key)
        age = datetime.datetime.now() - parent.created
        if age.days > 0 or age.seconds > OLD_SECONDS:
            logging.info(
                'Schedule old dirty:%d:%d: %s, age=%s, result_parent=%s, result_time=%s',
                index, scheduled, parent.category, age, parent_key, time_key)
            # count=-1 marks this as a sweep-initiated task.
            if ResultParent.ScheduleUpdateDirty(time_key, parent.category,
                                                count=-1):
                scheduled += 1
                if scheduled == 10:
                    break
def UpdateOldDirty():
    """Update dirty queries from the past.

    Looks at up to 500 dirty ResultTime keys and, for each not-yet-seen
    parent old enough to count as stale, schedules an update task.  At
    most 10 tasks get scheduled per call.
    """
    num_scheduled = 0
    handled_parents = set()
    query = ResultTime.all(keys_only=True).filter('dirty =', True)
    candidate_keys = query.fetch(500)
    for i, result_time_key in enumerate(candidate_keys):
        result_parent_key = result_time_key.parent()
        if result_parent_key not in handled_parents:
            handled_parents.add(result_parent_key)
            result_parent = ResultParent.get(result_parent_key)
            category = result_parent.category
            age = datetime.datetime.now() - result_parent.created
            # Stale means older than a day, or past the OLD_SECONDS window.
            is_old = age.days > 0 or age.seconds > OLD_SECONDS
            if is_old:
                logging.info(
                    'Schedule old dirty:%d:%d: %s, age=%s, result_parent=%s, result_time=%s',
                    i, num_scheduled, category, age, result_parent_key,
                    result_time_key)
                scheduled_ok = ResultParent.ScheduleUpdateDirty(
                    result_time_key, category, count=-1)
                if scheduled_ok:
                    num_scheduled += 1
                    if num_scheduled == 10:
                        break
def DataDump(request):
    """This is used by bin/data_dump.py to replicate the datastore.

    Request parameters:
      model: 'ResultParent' or 'UserAgent' (required).
      key_prefix: string prepended to each entry of 'keys'.
      keys: comma-separated key list (required).
      time_limit: soft per-request budget in seconds (default 3).

    Returns:
      An http.HttpResponse with a JSON body of the form
      {'data': [...], 'error': <message>} — 'error' only present when the
      dump stopped early (time limit or db.Timeout); whatever was gathered
      so far is still returned.
    """
    model = request.REQUEST.get('model')
    key_prefix = request.REQUEST.get('key_prefix', '')
    keys_list = request.REQUEST.get('keys')
    time_limit = int(request.REQUEST.get('time_limit', 3))
    if keys_list:
        keys = ['%s%s' % (key_prefix, key) for key in keys_list.split(',')]
    else:
        return http.HttpResponseBadRequest('"keys" is a required parameter.')
    start_time = datetime.datetime.now()
    if model == 'ResultParent':
        query = pager.PagerQuery(ResultParent, keys_only=True)
    elif model == 'UserAgent':
        query = pager.PagerQuery(UserAgent)
    else:
        return http.HttpResponseBadRequest(
            'model must be one of "ResultParent", "UserAgent".')
    data = []
    error = None
    if model == 'ResultParent':
        # One reusable GQL query, re-bound per parent below.
        result_time_query = ResultTime.gql('WHERE ANCESTOR IS :1')
        for result_parent_key in keys:
            # Stop early (partial data plus 'error') once over budget.
            if (datetime.datetime.now() - start_time).seconds > time_limit:
                error = 'Over time limit'
                break
            try:
                p = ResultParent.get(result_parent_key)
            except db.Timeout:
                error = 'db.Timeout: ResultParent'
                break
            if not p:
                # Key no longer resolves; report it rather than dropping it.
                data.append({
                    'model_class': 'ResultParent',
                    'lost_key': result_parent_key,
                })
                continue
            result_time_query.bind(p.key())
            try:
                result_times = result_time_query.fetch(1000)
            except db.Timeout:
                error = 'db.Timeout: ResultTime'
                break
            row_data = [{
                'model_class': 'ResultParent',
                'result_parent_key': result_parent_key,
                'category': p.category,
                'user_agent_key': str(
                    ResultParent.user_agent.get_value_for_datastore(p)),
                'ip': p.ip,
                'user_id': p.user and p.user.user_id() or None,
                'created': p.created and p.created.isoformat() or None,
                'params_str': p.params_str,
                # loader_id is optional on older entities; hasattr-guard it.
                'loader_id': hasattr(p, 'loader_id') and p.loader_id or None,
            }]
            is_dirty = False
            for result_time in result_times:
                # Any dirty child disqualifies the whole row set (stats not
                # final yet); stop collecting as soon as one is found.
                if result_time.dirty:
                    is_dirty = True
                    break
                row_data.append({
                    'model_class': 'ResultTime',
                    'result_time_key': str(result_time.key()),
                    'result_parent_key': str(result_parent_key),
                    'test': result_time.test,
                    'score': result_time.score,
                })
            if is_dirty:
                # Emit only a marker for dirty parents; caller can retry later.
                data.append({'dirty_key': result_parent_key,})
            else:
                data.extend(row_data)
    elif model == 'UserAgent':
        try:
            user_agents = UserAgent.get(keys)
        except db.Timeout:
            error = 'db.Timeout: UserAgent'
        else:
            for key, ua in zip(keys, user_agents):
                if ua:
                    data.append({
                        'model_class': 'UserAgent',
                        'user_agent_key': key,
                        'string': ua.string,
                        'family': ua.family,
                        'v1': ua.v1,
                        'v2': ua.v2,
                        'v3': ua.v3,
                        'confirmed': ua.confirmed,
                        'created': ua.created and ua.created.isoformat() or None,
                        # Optional on older entities; hasattr-guard it.
                        'js_user_agent_string': (
                            hasattr(ua, 'js_user_agent_string') and
                            ua.js_user_agent_string or None),
                    })
                else:
                    data.append({
                        'model_class': 'UserAgent',
                        'lost_key': key,
                    })
    response_params = {
        'data': data,
        }
    if error:
        response_params['error'] = error
    return http.HttpResponse(content=simplejson.dumps(response_params),
                             content_type='application/json')